first save

.venv/lib/python3.12/site-packages/stdlib/VERSIONS (new file, 326 lines)
@@ -0,0 +1,326 @@
# The structure of this file is as follows:
# - Blank lines and comments starting with `#` are ignored.
# - Lines contain the name of a module, followed by a colon,
#   a space, and a version range (for example: `symbol: 3.0-3.9`).
#
# Version ranges may be of the form "X.Y-A.B" or "X.Y-". The
# first form means that a module was introduced in version X.Y and last
# available in version A.B. The second form means that the module was
# introduced in version X.Y and is still available in the latest
# version of Python.
#
# If a submodule is not listed separately, it has the same lifetime as
# its parent module.
#
# Python versions before 3.0 are ignored, so any module that was already
# present in 3.0 will have "3.0" as its minimum version. Version ranges
# for unsupported versions of Python 3 are generally accurate but we do
# not guarantee their correctness.

__future__: 3.0-
__main__: 3.0-
_ast: 3.0-
_asyncio: 3.0-
_bisect: 3.0-
_blake2: 3.6-
_bootlocale: 3.4-3.9
_codecs: 3.0-
_collections_abc: 3.3-
_compat_pickle: 3.1-
_compression: 3.5-
_csv: 3.0-
_ctypes: 3.0-
_curses: 3.0-
_decimal: 3.3-
_dummy_thread: 3.0-3.8
_dummy_threading: 3.0-3.8
_frozen_importlib: 3.0-
_frozen_importlib_external: 3.5-
_heapq: 3.0-
_imp: 3.0-
_interpchannels: 3.13-
_interpqueues: 3.13-
_interpreters: 3.13-
_io: 3.0-
_json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
_markupbase: 3.0-
_msi: 3.0-3.12
_operator: 3.4-
_osx_support: 3.0-
_posixsubprocess: 3.2-
_py_abc: 3.7-
_pydecimal: 3.5-
_random: 3.0-
_sitebuiltins: 3.4-
_socket: 3.0-  # present in 3.0 at runtime, but not in typeshed
_sqlite3: 3.0-
_ssl: 3.0-
_stat: 3.4-
_thread: 3.0-
_threading_local: 3.0-
_tkinter: 3.0-
_tracemalloc: 3.4-
_typeshed: 3.0-  # not present at runtime, only for type checking
_warnings: 3.0-
_weakref: 3.0-
_weakrefset: 3.0-
_winapi: 3.3-
abc: 3.0-
aifc: 3.0-3.12
antigravity: 3.0-
argparse: 3.0-
array: 3.0-
ast: 3.0-
asynchat: 3.0-3.11
asyncio: 3.4-
asyncio.exceptions: 3.8-
asyncio.format_helpers: 3.7-
asyncio.mixins: 3.10-
asyncio.runners: 3.7-
asyncio.staggered: 3.8-
asyncio.taskgroups: 3.11-
asyncio.threads: 3.9-
asyncio.timeouts: 3.11-
asyncio.trsock: 3.8-
asyncore: 3.0-3.11
atexit: 3.0-
audioop: 3.0-3.12
base64: 3.0-
bdb: 3.0-
binascii: 3.0-
binhex: 3.0-3.10
bisect: 3.0-
builtins: 3.0-
bz2: 3.0-
cProfile: 3.0-
calendar: 3.0-
cgi: 3.0-3.12
cgitb: 3.0-3.12
chunk: 3.0-3.12
cmath: 3.0-
cmd: 3.0-
code: 3.0-
codecs: 3.0-
codeop: 3.0-
collections: 3.0-
collections.abc: 3.3-
colorsys: 3.0-
compileall: 3.0-
concurrent: 3.2-
configparser: 3.0-
contextlib: 3.0-
contextvars: 3.7-
copy: 3.0-
copyreg: 3.0-
crypt: 3.0-3.12
csv: 3.0-
ctypes: 3.0-
curses: 3.0-
dataclasses: 3.7-
datetime: 3.0-
dbm: 3.0-
dbm.sqlite3: 3.13-
decimal: 3.0-
difflib: 3.0-
dis: 3.0-
distutils: 3.0-3.11
distutils.command.bdist_msi: 3.0-3.10
distutils.command.bdist_wininst: 3.0-3.9
doctest: 3.0-
dummy_threading: 3.0-3.8
email: 3.0-
encodings: 3.0-
ensurepip: 3.0-
enum: 3.4-
errno: 3.0-
faulthandler: 3.3-
fcntl: 3.0-
filecmp: 3.0-
fileinput: 3.0-
fnmatch: 3.0-
formatter: 3.0-3.9
fractions: 3.0-
ftplib: 3.0-
functools: 3.0-
gc: 3.0-
genericpath: 3.0-
getopt: 3.0-
getpass: 3.0-
gettext: 3.0-
glob: 3.0-
graphlib: 3.9-
grp: 3.0-
gzip: 3.0-
hashlib: 3.0-
heapq: 3.0-
hmac: 3.0-
html: 3.0-
http: 3.0-
imaplib: 3.0-
imghdr: 3.0-3.12
imp: 3.0-3.11
importlib: 3.0-
importlib._abc: 3.10-
importlib._bootstrap: 3.0-
importlib._bootstrap_external: 3.5-
importlib.metadata: 3.8-
importlib.metadata._meta: 3.10-
importlib.metadata.diagnose: 3.13-
importlib.readers: 3.10-
importlib.resources: 3.7-
importlib.resources._common: 3.11-
importlib.resources._functional: 3.13-
importlib.resources.abc: 3.11-
importlib.resources.readers: 3.11-
importlib.resources.simple: 3.11-
importlib.simple: 3.11-
inspect: 3.0-
io: 3.0-
ipaddress: 3.3-
itertools: 3.0-
json: 3.0-
keyword: 3.0-
lib2to3: 3.0-3.12
linecache: 3.0-
locale: 3.0-
logging: 3.0-
lzma: 3.3-
mailbox: 3.0-
mailcap: 3.0-3.12
marshal: 3.0-
math: 3.0-
mimetypes: 3.0-
mmap: 3.0-
modulefinder: 3.0-
msilib: 3.0-3.12
msvcrt: 3.0-
multiprocessing: 3.0-
multiprocessing.resource_tracker: 3.8-
multiprocessing.shared_memory: 3.8-
netrc: 3.0-
nis: 3.0-3.12
nntplib: 3.0-3.12
nt: 3.0-
ntpath: 3.0-
nturl2path: 3.0-
numbers: 3.0-
opcode: 3.0-
operator: 3.0-
optparse: 3.0-
os: 3.0-
ossaudiodev: 3.0-3.12
parser: 3.0-3.9
pathlib: 3.4-
pdb: 3.0-
pickle: 3.0-
pickletools: 3.0-
pipes: 3.0-3.12
pkgutil: 3.0-
platform: 3.0-
plistlib: 3.0-
poplib: 3.0-
posix: 3.0-
posixpath: 3.0-
pprint: 3.0-
profile: 3.0-
pstats: 3.0-
pty: 3.0-
pwd: 3.0-
py_compile: 3.0-
pyclbr: 3.0-
pydoc: 3.0-
pydoc_data: 3.0-
pyexpat: 3.0-
queue: 3.0-
quopri: 3.0-
random: 3.0-
re: 3.0-
readline: 3.0-
reprlib: 3.0-
resource: 3.0-
rlcompleter: 3.0-
runpy: 3.0-
sched: 3.0-
secrets: 3.6-
select: 3.0-
selectors: 3.4-
shelve: 3.0-
shlex: 3.0-
shutil: 3.0-
signal: 3.0-
site: 3.0-
smtpd: 3.0-3.11
smtplib: 3.0-
sndhdr: 3.0-3.12
socket: 3.0-
socketserver: 3.0-
spwd: 3.0-3.12
sqlite3: 3.0-
sre_compile: 3.0-
sre_constants: 3.0-
sre_parse: 3.0-
ssl: 3.0-
stat: 3.0-
statistics: 3.4-
string: 3.0-
stringprep: 3.0-
struct: 3.0-
subprocess: 3.0-
sunau: 3.0-3.12
symbol: 3.0-3.9
symtable: 3.0-
sys: 3.0-
sys._monitoring: 3.12-  # Doesn't actually exist. See comments in the stub.
sysconfig: 3.0-
syslog: 3.0-
tabnanny: 3.0-
tarfile: 3.0-
telnetlib: 3.0-3.12
tempfile: 3.0-
termios: 3.0-
textwrap: 3.0-
this: 3.0-
threading: 3.0-
time: 3.0-
timeit: 3.0-
tkinter: 3.0-
tkinter.tix: 3.0-3.12
token: 3.0-
tokenize: 3.0-
tomllib: 3.11-
trace: 3.0-
traceback: 3.0-
tracemalloc: 3.4-
tty: 3.0-
turtle: 3.0-
types: 3.0-
typing: 3.5-
typing_extensions: 3.0-
unicodedata: 3.0-
unittest: 3.0-
unittest._log: 3.9-
unittest.async_case: 3.8-
urllib: 3.0-
uu: 3.0-3.12
uuid: 3.0-
venv: 3.3-
warnings: 3.0-
wave: 3.0-
weakref: 3.0-
webbrowser: 3.0-
winreg: 3.0-
winsound: 3.0-
wsgiref: 3.0-
wsgiref.types: 3.11-
xdrlib: 3.0-3.12
xml: 3.0-
xmlrpc: 3.0-
xxlimited: 3.2-
zipapp: 3.5-
zipfile: 3.0-
zipfile._path: 3.12-
zipimport: 3.0-
zlib: 3.0-
zoneinfo: 3.9-
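A minimal sketch (not part of the commit) of how a tool might consume the range format documented at the top of VERSIONS; the helper name `parse_versions_line` is hypothetical.

```python
def parse_versions_line(line: str) -> tuple[str, str, str | None] | None:
    # Drop comments and blank lines, then split "module: X.Y-A.B" / "module: X.Y-".
    line = line.split("#", 1)[0].strip()
    if not line:
        return None
    module, _, version_range = line.partition(": ")
    lower, _, upper = version_range.partition("-")
    return module, lower, upper or None

assert parse_versions_line("symbol: 3.0-3.9") == ("symbol", "3.0", "3.9")
assert parse_versions_line("zoneinfo: 3.9-") == ("zoneinfo", "3.9", None)
```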
.venv/lib/python3.12/site-packages/stdlib/__future__.pyi (new file, 36 lines)
@@ -0,0 +1,36 @@
from typing_extensions import TypeAlias

_VersionInfo: TypeAlias = tuple[int, int, int, str, int]

class _Feature:
    def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ...
    def getOptionalRelease(self) -> _VersionInfo: ...
    def getMandatoryRelease(self) -> _VersionInfo | None: ...
    compiler_flag: int

absolute_import: _Feature
division: _Feature
generators: _Feature
nested_scopes: _Feature
print_function: _Feature
unicode_literals: _Feature
with_statement: _Feature
barry_as_FLUFL: _Feature
generator_stop: _Feature
annotations: _Feature

all_feature_names: list[str]  # undocumented

__all__ = [
    "all_feature_names",
    "absolute_import",
    "division",
    "generators",
    "nested_scopes",
    "print_function",
    "unicode_literals",
    "with_statement",
    "barry_as_FLUFL",
    "generator_stop",
    "annotations",
]
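A short usage sketch for the `_Feature` API stubbed above; these are standard `__future__` attributes, so it runs on any supported CPython.

```python
import __future__

feature = __future__.annotations
print(feature.getOptionalRelease())   # version where the feature first became opt-in
print(feature.getMandatoryRelease())  # None while the feature is not (yet) mandatory
print(feature.compiler_flag)          # flag usable with compile()
print(__future__.all_feature_names)
```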
.venv/lib/python3.12/site-packages/stdlib/_ast.pyi (new file, 149 lines)
@@ -0,0 +1,149 @@
import sys
from ast import (
    AST as AST,
    Add as Add,
    And as And,
    AnnAssign as AnnAssign,
    Assert as Assert,
    Assign as Assign,
    AsyncFor as AsyncFor,
    AsyncFunctionDef as AsyncFunctionDef,
    AsyncWith as AsyncWith,
    Attribute as Attribute,
    AugAssign as AugAssign,
    Await as Await,
    BinOp as BinOp,
    BitAnd as BitAnd,
    BitOr as BitOr,
    BitXor as BitXor,
    BoolOp as BoolOp,
    Break as Break,
    Call as Call,
    ClassDef as ClassDef,
    Compare as Compare,
    Constant as Constant,
    Continue as Continue,
    Del as Del,
    Delete as Delete,
    Dict as Dict,
    DictComp as DictComp,
    Div as Div,
    Eq as Eq,
    ExceptHandler as ExceptHandler,
    Expr as Expr,
    Expression as Expression,
    FloorDiv as FloorDiv,
    For as For,
    FormattedValue as FormattedValue,
    FunctionDef as FunctionDef,
    FunctionType as FunctionType,
    GeneratorExp as GeneratorExp,
    Global as Global,
    Gt as Gt,
    GtE as GtE,
    If as If,
    IfExp as IfExp,
    Import as Import,
    ImportFrom as ImportFrom,
    In as In,
    Interactive as Interactive,
    Invert as Invert,
    Is as Is,
    IsNot as IsNot,
    JoinedStr as JoinedStr,
    Lambda as Lambda,
    List as List,
    ListComp as ListComp,
    Load as Load,
    LShift as LShift,
    Lt as Lt,
    LtE as LtE,
    MatMult as MatMult,
    Mod as Mod,
    Module as Module,
    Mult as Mult,
    Name as Name,
    NamedExpr as NamedExpr,
    Nonlocal as Nonlocal,
    Not as Not,
    NotEq as NotEq,
    NotIn as NotIn,
    Or as Or,
    Pass as Pass,
    Pow as Pow,
    Raise as Raise,
    Return as Return,
    RShift as RShift,
    Set as Set,
    SetComp as SetComp,
    Slice as Slice,
    Starred as Starred,
    Store as Store,
    Sub as Sub,
    Subscript as Subscript,
    Try as Try,
    Tuple as Tuple,
    TypeIgnore as TypeIgnore,
    UAdd as UAdd,
    UnaryOp as UnaryOp,
    USub as USub,
    While as While,
    With as With,
    Yield as Yield,
    YieldFrom as YieldFrom,
    alias as alias,
    arg as arg,
    arguments as arguments,
    boolop as boolop,
    cmpop as cmpop,
    comprehension as comprehension,
    excepthandler as excepthandler,
    expr as expr,
    expr_context as expr_context,
    keyword as keyword,
    mod as mod,
    operator as operator,
    stmt as stmt,
    type_ignore as type_ignore,
    unaryop as unaryop,
    withitem as withitem,
)
from typing import Literal

if sys.version_info >= (3, 12):
    from ast import ParamSpec as ParamSpec, TypeVar as TypeVar, TypeVarTuple as TypeVarTuple, type_param as type_param

if sys.version_info >= (3, 11):
    pass

if sys.version_info >= (3, 10):
    from ast import (
        MatchAs as MatchAs,
        MatchClass as MatchClass,
        MatchMapping as MatchMapping,
        MatchOr as MatchOr,
        MatchSequence as MatchSequence,
        MatchSingleton as MatchSingleton,
        MatchStar as MatchStar,
        MatchValue as MatchValue,
        match_case as match_case,
        pattern as pattern,
    )

if sys.version_info < (3, 9):
    from ast import (
        AugLoad as AugLoad,
        AugStore as AugStore,
        ExtSlice as ExtSlice,
        Index as Index,
        Param as Param,
        Suite as Suite,
        slice as slice,
    )

PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
PyCF_ONLY_AST: Literal[1024]
PyCF_TYPE_COMMENTS: Literal[4096]

if sys.version_info >= (3, 13):
    PyCF_OPTIMIZED_AST: Literal[33792]
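For reference, the `PyCF_*` flags above are compiler flags accepted by `compile()`; for example `PyCF_ONLY_AST` makes `compile()` return an AST node rather than a code object, which is essentially what `ast.parse` does.

```python
import ast

tree = compile("1 + 2", "<example>", "eval", flags=ast.PyCF_ONLY_AST)
print(isinstance(tree, ast.Expression))  # True
```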
.venv/lib/python3.12/site-packages/stdlib/_codecs.pyi (new file, 127 lines)
@@ -0,0 +1,127 @@
import codecs
import sys
from _typeshed import ReadableBuffer
from collections.abc import Callable
from typing import Literal, overload
from typing_extensions import TypeAlias

# This type is not exposed; it is defined in unicodeobject.c
class _EncodingMap:
    def size(self) -> int: ...

_CharMap: TypeAlias = dict[int, int] | _EncodingMap
_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]]
_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None]

def register(search_function: _SearchFunction, /) -> None: ...

if sys.version_info >= (3, 10):
    def unregister(search_function: _SearchFunction, /) -> None: ...

def register_error(errors: str, handler: _Handler, /) -> None: ...
def lookup_error(name: str, /) -> _Handler: ...

# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300
# https://docs.python.org/3/library/codecs.html#binary-transforms
_BytesToBytesEncoding: TypeAlias = Literal[
    "base64", "base_64", "base64_codec",
    "bz2", "bz2_codec",
    "hex", "hex_codec",
    "quopri", "quotedprintable", "quoted_printable", "quopri_codec",
    "uu", "uu_codec",
    "zip", "zlib", "zlib_codec",
]
# https://docs.python.org/3/library/codecs.html#text-transforms
_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"]

@overload
def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...
@overload
def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...  # type: ignore[overload-overlap]
@overload
def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ...
@overload
def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ...  # type: ignore[overload-overlap]
@overload
def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ...

# these are documented as text encodings but in practice they also accept str as input
@overload
def decode(
    obj: str,
    encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"],
    errors: str = "strict",
) -> str: ...

# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str
@overload
def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ...
@overload
def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ...
def lookup(encoding: str, /) -> codecs.CodecInfo: ...
def charmap_build(map: str, /) -> _CharMap: ...
def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def charmap_decode(data: ReadableBuffer, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[str, int]: ...
def charmap_encode(str: str, errors: str | None = None, mapping: _CharMap | None = None, /) -> tuple[bytes, int]: ...
def escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def escape_encode(data: bytes, errors: str | None = None, /) -> tuple[bytes, int]: ...
def latin_1_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...
def latin_1_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...

if sys.version_info >= (3, 9):
    def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ...

else:
    def raw_unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...

def raw_unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def readbuffer_encode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[bytes, int]: ...

if sys.version_info >= (3, 9):
    def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, final: bool = True, /) -> tuple[str, int]: ...

else:
    def unicode_escape_decode(data: str | ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ...

def unicode_escape_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_16_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_16_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
def utf_16_ex_decode(
    data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
) -> tuple[str, int, int]: ...
def utf_16_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_16_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_32_be_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_32_be_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_32_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_32_encode(str: str, errors: str | None = None, byteorder: int = 0, /) -> tuple[bytes, int]: ...
def utf_32_ex_decode(
    data: ReadableBuffer, errors: str | None = None, byteorder: int = 0, final: bool = False, /
) -> tuple[str, int, int]: ...
def utf_32_le_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_32_le_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_7_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_7_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
def utf_8_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
def utf_8_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...

if sys.platform == "win32":
    def mbcs_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
    def mbcs_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
    def code_page_decode(codepage: int, data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
    def code_page_encode(code_page: int, str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
    def oem_decode(data: ReadableBuffer, errors: str | None = None, final: bool = False, /) -> tuple[str, int]: ...
    def oem_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ...
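A short sketch of the bytes-to-bytes and text transforms these overloads describe, using the public `codecs` wrappers rather than `_codecs` directly.

```python
import codecs

print(codecs.encode(b"hello", "base64"))   # b'aGVsbG8=\n'  (bytes -> bytes)
print(codecs.decode("68656c6c6f", "hex"))  # b'hello'  (str accepted for hex, per the overload above)
print(codecs.encode("gnirts", "rot13"))    # 'tavegf'  (str -> str)
```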
.venv/lib/python3.12/site-packages/stdlib/_collections_abc.pyi (new file, 101 lines)
@@ -0,0 +1,101 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import (  # noqa: Y022,Y038
    AbstractSet as Set,
    AsyncGenerator as AsyncGenerator,
    AsyncIterable as AsyncIterable,
    AsyncIterator as AsyncIterator,
    Awaitable as Awaitable,
    Callable as Callable,
    Collection as Collection,
    Container as Container,
    Coroutine as Coroutine,
    Generator as Generator,
    Generic,
    Hashable as Hashable,
    ItemsView as ItemsView,
    Iterable as Iterable,
    Iterator as Iterator,
    KeysView as KeysView,
    Mapping as Mapping,
    MappingView as MappingView,
    MutableMapping as MutableMapping,
    MutableSequence as MutableSequence,
    MutableSet as MutableSet,
    Protocol,
    Reversible as Reversible,
    Sequence as Sequence,
    Sized as Sized,
    TypeVar,
    ValuesView as ValuesView,
    final,
    runtime_checkable,
)

__all__ = [
    "Awaitable",
    "Coroutine",
    "AsyncIterable",
    "AsyncIterator",
    "AsyncGenerator",
    "Hashable",
    "Iterable",
    "Iterator",
    "Generator",
    "Reversible",
    "Sized",
    "Container",
    "Callable",
    "Collection",
    "Set",
    "MutableSet",
    "Mapping",
    "MutableMapping",
    "MappingView",
    "KeysView",
    "ItemsView",
    "ValuesView",
    "Sequence",
    "MutableSequence",
]
if sys.version_info < (3, 14):
    from typing import ByteString as ByteString  # noqa: Y057

    __all__ += ["ByteString"]

if sys.version_info >= (3, 12):
    __all__ += ["Buffer"]

_KT_co = TypeVar("_KT_co", covariant=True)  # Key type covariant containers.
_VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.

@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
    if sys.version_info >= (3, 13):
        def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
    if sys.version_info >= (3, 13):
        def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

if sys.version_info >= (3, 12):
    @runtime_checkable
    class Buffer(Protocol):
        @abstractmethod
        def __buffer__(self, flags: int, /) -> memoryview: ...
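An illustrative sketch (assumes Python 3.12+) of a user class satisfying the `Buffer` protocol declared above; the class name is hypothetical.

```python
class ByteBox:
    """Minimal object exposing the buffer protocol (PEP 688)."""

    def __init__(self, data: bytes) -> None:
        self._data = bytearray(data)

    def __buffer__(self, flags: int, /) -> memoryview:
        return memoryview(self._data)

print(bytes(memoryview(ByteBox(b"abc"))))  # b'abc'
```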
.venv/lib/python3.12/site-packages/stdlib/_decimal.pyi (new file, 74 lines)
@@ -0,0 +1,74 @@
import sys
from decimal import (
    Clamped as Clamped,
    Context as Context,
    ConversionSyntax as ConversionSyntax,
    Decimal as Decimal,
    DecimalException as DecimalException,
    DecimalTuple as DecimalTuple,
    DivisionByZero as DivisionByZero,
    DivisionImpossible as DivisionImpossible,
    DivisionUndefined as DivisionUndefined,
    FloatOperation as FloatOperation,
    Inexact as Inexact,
    InvalidContext as InvalidContext,
    InvalidOperation as InvalidOperation,
    Overflow as Overflow,
    Rounded as Rounded,
    Subnormal as Subnormal,
    Underflow as Underflow,
)
from types import TracebackType
from typing import Final
from typing_extensions import TypeAlias

_TrapType: TypeAlias = type[DecimalException]

class _ContextManager:
    new_context: Context
    saved_context: Context
    def __init__(self, new_context: Context) -> None: ...
    def __enter__(self) -> Context: ...
    def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...

__version__: Final[str]
__libmpdec_version__: Final[str]

ROUND_DOWN: Final[str]
ROUND_HALF_UP: Final[str]
ROUND_HALF_EVEN: Final[str]
ROUND_CEILING: Final[str]
ROUND_FLOOR: Final[str]
ROUND_UP: Final[str]
ROUND_HALF_DOWN: Final[str]
ROUND_05UP: Final[str]
HAVE_CONTEXTVAR: Final[bool]
HAVE_THREADS: Final[bool]
MAX_EMAX: Final[int]
MAX_PREC: Final[int]
MIN_EMIN: Final[int]
MIN_ETINY: Final[int]

def setcontext(context: Context, /) -> None: ...
def getcontext() -> Context: ...

if sys.version_info >= (3, 11):
    def localcontext(
        ctx: Context | None = None,
        *,
        prec: int | None = ...,
        rounding: str | None = ...,
        Emin: int | None = ...,
        Emax: int | None = ...,
        capitals: int | None = ...,
        clamp: int | None = ...,
        traps: dict[_TrapType, bool] | None = ...,
        flags: dict[_TrapType, bool] | None = ...,
    ) -> _ContextManager: ...

else:
    def localcontext(ctx: Context | None = None) -> _ContextManager: ...

DefaultContext: Context
BasicContext: Context
ExtendedContext: Context
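Usage sketch for the two `localcontext` signatures above (the keyword form is only available on Python 3.11+):

```python
from decimal import Decimal, localcontext

with localcontext() as ctx:          # works on all supported versions
    ctx.prec = 6
    print(Decimal(1) / Decimal(7))   # 0.142857

# On 3.11+ the same thing can be spelled with keyword arguments:
# with localcontext(prec=6): ...
```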
@@ -0,0 +1,34 @@
# Utility types for typeshed

This package and its submodules contain various common types used by
typeshed. It can also be used by packages outside typeshed, but beware
the API stability guarantees below.

## Usage

The `_typeshed` package and its types do not exist at runtime, but can be
used freely in stub (`.pyi`) files. To import the types from this package in
implementation (`.py`) files, use the following construct:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from _typeshed import ...
```

Types can then be used in annotations by either quoting them or
using:

```python
from __future__ import annotations
```

## API Stability

You can use this package and its submodules outside of typeshed, but we
guarantee only limited API stability. Items marked as "stable" will not be
removed or changed in an incompatible way for at least one year.
Before making such a change, the "stable" moniker will be removed
and we will mark the type in question as deprecated. No guarantees
are made about unmarked types.
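As a concrete, illustrative example of the `TYPE_CHECKING` construct described in the Usage section above, a runtime module could annotate a path parameter with `_typeshed.StrPath` like this:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from _typeshed import StrPath

def read_text(path: StrPath) -> str:
    # Accepts str as well as os.PathLike[str] (e.g. pathlib.Path).
    with open(path) as f:
        return f.read()
```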
.venv/lib/python3.12/site-packages/stdlib/_typeshed/__init__.pyi (new file, 361 lines)
@@ -0,0 +1,361 @@
# Utility types for typeshed
#
# See the README.md file in this directory for more information.

import sys
from collections.abc import Awaitable, Callable, Iterable, Sequence, Set as AbstractSet, Sized
from dataclasses import Field
from os import PathLike
from types import FrameType, TracebackType
from typing import (
    Any,
    AnyStr,
    ClassVar,
    Final,
    Generic,
    Literal,
    Protocol,
    SupportsFloat,
    SupportsIndex,
    SupportsInt,
    TypeVar,
    final,
    overload,
)
from typing_extensions import Buffer, LiteralString, TypeAlias

_KT = TypeVar("_KT")
_KT_co = TypeVar("_KT_co", covariant=True)
_KT_contra = TypeVar("_KT_contra", contravariant=True)
_VT = TypeVar("_VT")
_VT_co = TypeVar("_VT_co", covariant=True)
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)

# Alternative to `typing_extensions.Self`, exclusively for use with `__new__`
# in metaclasses:
#     def __new__(cls: type[Self], ...) -> Self: ...
# In other cases, use `typing_extensions.Self`.
Self = TypeVar("Self")  # noqa: Y001

# covariant version of typing.AnyStr, useful for protocols
AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True)  # noqa: Y001

# For partially known annotations. Usually, fields where type annotations
# haven't been added are left unannotated, but in some situations this
# isn't possible or a type is already partially known. In cases like these,
# use Incomplete instead of Any as a marker. For example, use
# "Incomplete | None" instead of "Any | None".
Incomplete: TypeAlias = Any  # stable

# To describe a function parameter that is unused and will work with anything.
Unused: TypeAlias = object  # stable

# Marker for return types that include None, but where forcing the user to
# check for None can be detrimental. Sometimes called "the Any trick". See
# CONTRIBUTING.md for more information.
MaybeNone: TypeAlias = Any  # stable

# Used to mark arguments that default to a sentinel value. This prevents
# stubtest from complaining about the default value not matching.
#
#     def foo(x: int | None = sentinel) -> None: ...
#
# In cases where the sentinel object is exported and can be used by user code,
# a construct like this is better:
#
#     _SentinelType = NewType("_SentinelType", object)
#     sentinel: _SentinelType
#     def foo(x: int | None | _SentinelType = ...) -> None: ...
sentinel: Any

# stable
class IdentityFunction(Protocol):
    def __call__(self, x: _T, /) -> _T: ...

# stable
class SupportsNext(Protocol[_T_co]):
    def __next__(self) -> _T_co: ...

# stable
class SupportsAnext(Protocol[_T_co]):
    def __anext__(self) -> Awaitable[_T_co]: ...

# Comparison protocols

class SupportsDunderLT(Protocol[_T_contra]):
    def __lt__(self, other: _T_contra, /) -> bool: ...

class SupportsDunderGT(Protocol[_T_contra]):
    def __gt__(self, other: _T_contra, /) -> bool: ...

class SupportsDunderLE(Protocol[_T_contra]):
    def __le__(self, other: _T_contra, /) -> bool: ...

class SupportsDunderGE(Protocol[_T_contra]):
    def __ge__(self, other: _T_contra, /) -> bool: ...

class SupportsAllComparisons(
    SupportsDunderLT[Any], SupportsDunderGT[Any], SupportsDunderLE[Any], SupportsDunderGE[Any], Protocol
): ...

SupportsRichComparison: TypeAlias = SupportsDunderLT[Any] | SupportsDunderGT[Any]
SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichComparison)  # noqa: Y001

# Dunder protocols

class SupportsAdd(Protocol[_T_contra, _T_co]):
    def __add__(self, x: _T_contra, /) -> _T_co: ...

class SupportsRAdd(Protocol[_T_contra, _T_co]):
    def __radd__(self, x: _T_contra, /) -> _T_co: ...

class SupportsSub(Protocol[_T_contra, _T_co]):
    def __sub__(self, x: _T_contra, /) -> _T_co: ...

class SupportsRSub(Protocol[_T_contra, _T_co]):
    def __rsub__(self, x: _T_contra, /) -> _T_co: ...

class SupportsDivMod(Protocol[_T_contra, _T_co]):
    def __divmod__(self, other: _T_contra, /) -> _T_co: ...

class SupportsRDivMod(Protocol[_T_contra, _T_co]):
    def __rdivmod__(self, other: _T_contra, /) -> _T_co: ...

# This protocol is generic over the iterator type, while Iterable is
# generic over the type that is iterated over.
class SupportsIter(Protocol[_T_co]):
    def __iter__(self) -> _T_co: ...

# This protocol is generic over the iterator type, while AsyncIterable is
# generic over the type that is iterated over.
class SupportsAiter(Protocol[_T_co]):
    def __aiter__(self) -> _T_co: ...

class SupportsLenAndGetItem(Protocol[_T_co]):
    def __len__(self) -> int: ...
    def __getitem__(self, k: int, /) -> _T_co: ...

class SupportsTrunc(Protocol):
    def __trunc__(self) -> int: ...

# Mapping-like protocols

# stable
class SupportsItems(Protocol[_KT_co, _VT_co]):
    def items(self) -> AbstractSet[tuple[_KT_co, _VT_co]]: ...

# stable
class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]):
    def keys(self) -> Iterable[_KT]: ...
    def __getitem__(self, key: _KT, /) -> _VT_co: ...

# This protocol is currently under discussion. Use SupportsContainsAndGetItem
# instead, if you require the __contains__ method.
# See https://github.com/python/typeshed/issues/11822.
class SupportsGetItem(Protocol[_KT_contra, _VT_co]):
    def __contains__(self, x: Any, /) -> bool: ...
    def __getitem__(self, key: _KT_contra, /) -> _VT_co: ...

# stable
class SupportsContainsAndGetItem(Protocol[_KT_contra, _VT_co]):
    def __contains__(self, x: Any, /) -> bool: ...
    def __getitem__(self, key: _KT_contra, /) -> _VT_co: ...

# stable
class SupportsItemAccess(Protocol[_KT_contra, _VT]):
    def __contains__(self, x: Any, /) -> bool: ...
    def __getitem__(self, key: _KT_contra, /) -> _VT: ...
    def __setitem__(self, key: _KT_contra, value: _VT, /) -> None: ...
    def __delitem__(self, key: _KT_contra, /) -> None: ...

StrPath: TypeAlias = str | PathLike[str]  # stable
BytesPath: TypeAlias = bytes | PathLike[bytes]  # stable
GenericPath: TypeAlias = AnyStr | PathLike[AnyStr]
StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]  # stable

OpenTextModeUpdating: TypeAlias = Literal[
    "r+", "+r", "rt+", "r+t", "+rt", "tr+", "t+r", "+tr",
    "w+", "+w", "wt+", "w+t", "+wt", "tw+", "t+w", "+tw",
    "a+", "+a", "at+", "a+t", "+at", "ta+", "t+a", "+ta",
    "x+", "+x", "xt+", "x+t", "+xt", "tx+", "t+x", "+tx",
]
OpenTextModeWriting: TypeAlias = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"]
OpenTextModeReading: TypeAlias = Literal["r", "rt", "tr", "U", "rU", "Ur", "rtU", "rUt", "Urt", "trU", "tUr", "Utr"]
OpenTextMode: TypeAlias = OpenTextModeUpdating | OpenTextModeWriting | OpenTextModeReading
OpenBinaryModeUpdating: TypeAlias = Literal[
    "rb+", "r+b", "+rb", "br+", "b+r", "+br",
    "wb+", "w+b", "+wb", "bw+", "b+w", "+bw",
    "ab+", "a+b", "+ab", "ba+", "b+a", "+ba",
    "xb+", "x+b", "+xb", "bx+", "b+x", "+bx",
]
OpenBinaryModeWriting: TypeAlias = Literal["wb", "bw", "ab", "ba", "xb", "bx"]
OpenBinaryModeReading: TypeAlias = Literal["rb", "br", "rbU", "rUb", "Urb", "brU", "bUr", "Ubr"]
OpenBinaryMode: TypeAlias = OpenBinaryModeUpdating | OpenBinaryModeReading | OpenBinaryModeWriting

# stable
class HasFileno(Protocol):
    def fileno(self) -> int: ...

FileDescriptor: TypeAlias = int  # stable
FileDescriptorLike: TypeAlias = int | HasFileno  # stable
FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath

# stable
class SupportsRead(Protocol[_T_co]):
    def read(self, length: int = ..., /) -> _T_co: ...

# stable
class SupportsReadline(Protocol[_T_co]):
    def readline(self, length: int = ..., /) -> _T_co: ...

# stable
class SupportsNoArgReadline(Protocol[_T_co]):
    def readline(self) -> _T_co: ...

# stable
class SupportsWrite(Protocol[_T_contra]):
    def write(self, s: _T_contra, /) -> object: ...

# stable
class SupportsFlush(Protocol):
    def flush(self) -> object: ...

# Unfortunately PEP 688 does not allow us to distinguish read-only
# from writable buffers. We use these aliases for readability for now.
# Perhaps a future extension of the buffer protocol will allow us to
# distinguish these cases in the type system.
ReadOnlyBuffer: TypeAlias = Buffer  # stable
# Anything that implements the read-write buffer interface.
WriteableBuffer: TypeAlias = Buffer
# Same as WriteableBuffer, but also includes read-only buffer types (like bytes).
ReadableBuffer: TypeAlias = Buffer  # stable

class SliceableBuffer(Buffer, Protocol):
    def __getitem__(self, slice: slice, /) -> Sequence[int]: ...

class IndexableBuffer(Buffer, Protocol):
    def __getitem__(self, i: int, /) -> int: ...

class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol):
    def __contains__(self, x: Any, /) -> bool: ...
    @overload
    def __getitem__(self, slice: slice, /) -> Sequence[int]: ...
    @overload
    def __getitem__(self, i: int, /) -> int: ...

class SizedBuffer(Sized, Buffer, Protocol): ...

# for compatibility with third-party stubs that may use this
_BufferWithLen: TypeAlias = SizedBuffer  # not stable  # noqa: Y047

ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType]
OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None]

# stable
if sys.version_info >= (3, 10):
    from types import NoneType as NoneType
else:
    # Used by type checkers for checks involving None (does not exist at runtime)
    @final
    class NoneType:
        def __bool__(self) -> Literal[False]: ...

# This is an internal CPython type that is like, but subtly different from, a NamedTuple
# Subclasses of this type are found in multiple modules.
# In typeshed, `structseq` is only ever used as a mixin in combination with a fixed-length `Tuple`
# See discussion at #6546 & #6560
# `structseq` classes are unsubclassable, so are all decorated with `@final`.
class structseq(Generic[_T_co]):
    n_fields: Final[int]
    n_unnamed_fields: Final[int]
    n_sequence_fields: Final[int]
    # The first parameter will generally only take an iterable of a specific length.
    # E.g. `os.uname_result` takes any iterable of length exactly 5.
    #
    # The second parameter will accept a dict of any kind without raising an exception,
    # but only has any meaning if you supply it a dict where the keys are strings.
    # https://github.com/python/typeshed/pull/6560#discussion_r767149830
    def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ...
    if sys.version_info >= (3, 13):
        def __replace__(self: Self, **kwargs: Any) -> Self: ...

# Superset of typing.AnyStr that also includes LiteralString
AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString)  # noqa: Y001

# Represents when str or LiteralStr is acceptable. Useful for string processing
# APIs where literalness of return value depends on literalness of inputs
StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str)  # noqa: Y001

# Objects suitable to be passed to sys.setprofile, threading.setprofile, and similar
ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object]

# Objects suitable to be passed to sys.settrace, threading.settrace, and similar
TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None]

# experimental
# Might not work as expected for pyright, see
#   https://github.com/python/typeshed/pull/9362
#   https://github.com/microsoft/pyright/issues/4339
class DataclassInstance(Protocol):
    __dataclass_fields__: ClassVar[dict[str, Field[Any]]]  # type: ignore

# Anything that can be passed to the int/float constructors
ConvertibleToInt: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc
ConvertibleToFloat: TypeAlias = str | ReadableBuffer | SupportsFloat | SupportsIndex

# A few classes updated from Foo(str, Enum) to Foo(StrEnum). This is a convenience so these
# can be accurate on all python versions without getting too wordy
if sys.version_info >= (3, 11):
    from enum import StrEnum as StrEnum
else:
    from enum import Enum

    class StrEnum(str, Enum): ...
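An illustrative sketch of how a `TYPE_CHECKING`-guarded runtime module can use the comparison helpers above; `smallest` is a hypothetical function, not part of `_typeshed`.

```python
from __future__ import annotations

from collections.abc import Iterable
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from _typeshed import SupportsRichComparisonT

def smallest(items: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT:
    # Anything orderable via __lt__/__gt__ is accepted, mirroring min()'s own constraint.
    return min(items)

print(smallest([3, 1, 2]))  # 1
```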
@@ -0,0 +1,37 @@
# PEP 249 Database API 2.0 Types
# https://www.python.org/dev/peps/pep-0249/

from collections.abc import Mapping, Sequence
from typing import Any, Protocol
from typing_extensions import TypeAlias

DBAPITypeCode: TypeAlias = Any | None
# Strictly speaking, this should be a Sequence, but the type system does
# not support fixed-length sequences.
DBAPIColumnDescription: TypeAlias = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None]

class DBAPIConnection(Protocol):
    def close(self) -> object: ...
    def commit(self) -> object: ...
    # optional:
    # def rollback(self) -> Any: ...
    def cursor(self) -> DBAPICursor: ...

class DBAPICursor(Protocol):
    @property
    def description(self) -> Sequence[DBAPIColumnDescription] | None: ...
    @property
    def rowcount(self) -> int: ...
    # optional:
    # def callproc(self, procname: str, parameters: Sequence[Any] = ..., /) -> Sequence[Any]: ...
    def close(self) -> object: ...
    def execute(self, operation: str, parameters: Sequence[Any] | Mapping[str, Any] = ..., /) -> object: ...
    def executemany(self, operation: str, seq_of_parameters: Sequence[Sequence[Any]], /) -> object: ...
    def fetchone(self) -> Sequence[Any] | None: ...
    def fetchmany(self, size: int = ..., /) -> Sequence[Sequence[Any]]: ...
    def fetchall(self) -> Sequence[Sequence[Any]]: ...
    # optional:
    # def nextset(self) -> None | Literal[True]: ...
    arraysize: int
    def setinputsizes(self, sizes: Sequence[DBAPITypeCode | int | None], /) -> object: ...
    def setoutputsize(self, size: int, column: int = ..., /) -> object: ...
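Illustrative sketch: `sqlite3` objects structurally satisfy these protocols, so a helper can be typed against `DBAPIConnection`. The function name is hypothetical.

```python
from __future__ import annotations

import sqlite3
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from _typeshed.dbapi import DBAPIConnection

def table_names(conn: DBAPIConnection) -> list[str]:
    cur = conn.cursor()
    cur.execute("SELECT name FROM sqlite_master WHERE type = 'table'")
    return [row[0] for row in cur.fetchall()]

print(table_names(sqlite3.connect(":memory:")))  # []
```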
@@ -0,0 +1,18 @@
# Implicit protocols used in importlib.
# We intentionally omit deprecated and optional methods.

from collections.abc import Sequence
from importlib.machinery import ModuleSpec
from types import ModuleType
from typing import Protocol

__all__ = ["LoaderProtocol", "MetaPathFinderProtocol", "PathEntryFinderProtocol"]

class LoaderProtocol(Protocol):
    def load_module(self, fullname: str, /) -> ModuleType: ...

class MetaPathFinderProtocol(Protocol):
    def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ..., /) -> ModuleSpec | None: ...

class PathEntryFinderProtocol(Protocol):
    def find_spec(self, fullname: str, target: ModuleType | None = ..., /) -> ModuleSpec | None: ...
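Illustrative sketch of an object matching `MetaPathFinderProtocol` (a finder that simply declines every import and defers to the next entry on `sys.meta_path`); the class name is hypothetical.

```python
from __future__ import annotations

import sys
from collections.abc import Sequence
from importlib.machinery import ModuleSpec
from types import ModuleType

class DeclineEverythingFinder:
    def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = None, /) -> ModuleSpec | None:
        return None  # let the remaining finders handle the import

sys.meta_path.append(DeclineEverythingFinder())
```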
.venv/lib/python3.12/site-packages/stdlib/_typeshed/wsgi.pyi (new file, 44 lines)
@@ -0,0 +1,44 @@
# Types to support PEP 3333 (WSGI)
#
# Obsolete since Python 3.11: Use wsgiref.types instead.
#
# See the README.md file in this directory for more information.

import sys
from _typeshed import OptExcInfo
from collections.abc import Callable, Iterable, Iterator
from typing import Any, Protocol
from typing_extensions import TypeAlias

class _Readable(Protocol):
    def read(self, size: int = ..., /) -> bytes: ...
    # Optional: def close(self) -> object: ...

if sys.version_info >= (3, 11):
    pass
else:
    # stable
    class StartResponse(Protocol):
        def __call__(
            self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ..., /
        ) -> Callable[[bytes], object]: ...

WSGIEnvironment: TypeAlias = dict[str, Any]  # stable
WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]]  # stable

# WSGI input streams per PEP 3333, stable
class InputStream(Protocol):
    def read(self, size: int = ..., /) -> bytes: ...
    def readline(self, size: int = ..., /) -> bytes: ...
    def readlines(self, hint: int = ..., /) -> list[bytes]: ...
    def __iter__(self) -> Iterator[bytes]: ...

# WSGI error streams per PEP 3333, stable
class ErrorStream(Protocol):
    def flush(self) -> object: ...
    def write(self, s: str, /) -> object: ...
    def writelines(self, seq: list[str], /) -> object: ...

# Optional file wrapper in wsgi.file_wrapper
class FileWrapper(Protocol):
    def __call__(self, file: _Readable, block_size: int = ..., /) -> Iterable[bytes]: ...
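Illustrative sketch of a callable matching the `WSGIApplication` alias above; the app name is hypothetical and it could be served with `wsgiref.simple_server`.

```python
from __future__ import annotations

from collections.abc import Iterable
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from _typeshed.wsgi import StartResponse, WSGIEnvironment

def hello_app(environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]:
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello, WSGI\n"]
```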
@@ -0,0 +1,9 @@
# See the README.md file in this directory for more information.

from typing import Any, Protocol

# As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects
class DOMImplementation(Protocol):
    def hasFeature(self, feature: str, version: str | None, /) -> bool: ...
    def createDocument(self, namespaceUri: str, qualifiedName: str, doctype: Any | None, /) -> Any: ...
    def createDocumentType(self, qualifiedName: str, publicId: str, systemId: str, /) -> Any: ...
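For reference, `xml.dom.minidom.getDOMImplementation()` returns an object that matches this protocol:

```python
from xml.dom.minidom import getDOMImplementation

impl = getDOMImplementation()
doc = impl.createDocument(None, "root", None)
print(doc.documentElement.tagName)  # root
```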
.venv/lib/python3.12/site-packages/stdlib/abc.pyi (new file, 51 lines)
@@ -0,0 +1,51 @@
import _typeshed
import sys
from _typeshed import SupportsWrite
from collections.abc import Callable
from typing import Any, Literal, TypeVar
from typing_extensions import Concatenate, ParamSpec, deprecated

_T = TypeVar("_T")
_R_co = TypeVar("_R_co", covariant=True)
_FuncT = TypeVar("_FuncT", bound=Callable[..., Any])
_P = ParamSpec("_P")

# These definitions have special processing in mypy
class ABCMeta(type):
    __abstractmethods__: frozenset[str]
    if sys.version_info >= (3, 11):
        def __new__(
            mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwargs: Any
        ) -> _typeshed.Self: ...
    else:
        def __new__(
            mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any
        ) -> _typeshed.Self: ...

    def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ...
    def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ...
    def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ...
    def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ...

def abstractmethod(funcobj: _FuncT) -> _FuncT: ...
@deprecated("Use 'classmethod' with 'abstractmethod' instead")
class abstractclassmethod(classmethod[_T, _P, _R_co]):
    __isabstractmethod__: Literal[True]
    def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ...

@deprecated("Use 'staticmethod' with 'abstractmethod' instead")
class abstractstaticmethod(staticmethod[_P, _R_co]):
    __isabstractmethod__: Literal[True]
    def __init__(self, callable: Callable[_P, _R_co]) -> None: ...

@deprecated("Use 'property' with 'abstractmethod' instead")
class abstractproperty(property):
    __isabstractmethod__: Literal[True]

class ABC(metaclass=ABCMeta):
    __slots__ = ()

def get_cache_token() -> object: ...

if sys.version_info >= (3, 10):
    def update_abstractmethods(cls: type[_T]) -> type[_T]: ...
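A small usage sketch for `ABC`, `abstractmethod`, and `ABCMeta.register` as stubbed above; the class names are hypothetical.

```python
import json
from abc import ABC, abstractmethod

class Serializer(ABC):
    @abstractmethod
    def dumps(self, obj: object) -> bytes: ...

class JsonSerializer(Serializer):
    def dumps(self, obj: object) -> bytes:
        return json.dumps(obj).encode()

Serializer.register(dict)                # virtual subclass, no inheritance required
print(issubclass(dict, Serializer))      # True
print(JsonSerializer().dumps({"a": 1}))  # b'{"a": 1}'
```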
.venv/lib/python3.12/site-packages/stdlib/array.pyi (new file, 496 lines)
@@ -0,0 +1,496 @@
|
||||
"""
|
||||
Efficient arrays of numeric data.
|
||||
|
||||
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/array.html
|
||||
|
||||
CPython module: :mod:`python:array` https://docs.python.org/3/library/array.html .
|
||||
|
||||
Supported format codes: ``b``, ``B``, ``h``, ``H``, ``i``, ``I``, ``l``,
|
||||
``L``, ``q``, ``Q``, ``f``, ``d`` (the latter 2 depending on the
|
||||
floating-point support).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
import sys
|
||||
from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite
|
||||
from collections.abc import MutableSequence, Sequence, Iterable
|
||||
|
||||
# pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence
|
||||
from typing import Generic, Any, Literal, MutableSequence, SupportsIndex, TypeVar, overload # noqa: Y022
|
||||
from typing_extensions import Awaitable, TypeVar, Self, TypeAlias
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
from types import GenericAlias
|
||||
|
||||
_IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"]
|
||||
_FloatTypeCode: TypeAlias = Literal["f", "d"]
|
||||
_UnicodeTypeCode: TypeAlias = Literal["u"]
|
||||
_TypeCode: TypeAlias = _IntTypeCode | _FloatTypeCode | _UnicodeTypeCode
|
||||
|
||||
_T = TypeVar("_T", int, float, str)
|
||||
|
||||
# typecodes: str
|
||||
|
||||
class array(MutableSequence[_T], Generic[_T]):
|
||||
"""
|
||||
|see_cpython_module| :mod:`python:array`.
|
||||
|
||||
Supported format codes: ``b``, ``B``, ``h``, ``H``, ``i``, ``I``, ``l``,
|
||||
``L``, ``q``, ``Q``, ``f``, ``d`` (the latter 2 depending on the
|
||||
floating-point support).
|
||||
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| Type code | C Type | Python Type | Minimum size in bytes |
|
||||
+===========+====================+===================+=======================+
|
||||
| ``'b'`` | signed char | int | 1 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'B'`` | unsigned char | int | 1 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'h'`` | signed short | int | 2 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'H'`` | unsigned short | int | 2 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'i'`` | signed int | int | 2 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'I'`` | unsigned int | int | 2 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'l'`` | signed long | int | 4 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'L'`` | unsigned long | int | 4 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'q'`` | signed long long | int | 8 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'Q'`` | unsigned long long | int | 8 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'f'`` | float | float | 4 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
| ``'d'`` | double | float | 8 |
|
||||
+-----------+--------------------+-------------------+-----------------------+
|
||||
"""
|
||||
|
||||
@property
def typecode(self) -> _TypeCode: ...
@property
def itemsize(self) -> int: ...
@overload
def __init__(self: array[int], typecode: _IntTypeCode, initializer: bytes | bytearray | Iterable[int] = ..., /) -> None: ...
@overload
def __init__(self: array[float], typecode: _FloatTypeCode, initializer: bytes | bytearray | Iterable[float] = ..., /) -> None: ...
@overload
def __init__(self: array[str], typecode: _UnicodeTypeCode, initializer: bytes | bytearray | Iterable[str] = ..., /) -> None: ...
@overload
def __init__(self, typecode: str, initializer: Iterable[_T], /) -> None: ...
@overload
def __init__(self, typecode: str, initializer: bytes | bytearray = ..., /) -> None: ...
def append(self, val: Any, /) -> None:
"""
Append new element *val* to the end of array, growing it.
"""
...

def buffer_info(self) -> tuple[int, int]: ...
def byteswap(self) -> None: ...
def count(self, v: _T, /) -> int: ...
def extend(self, iterable: Sequence[Any], /) -> None:
"""
Append new elements as contained in *iterable* to the end of
array, growing it.
"""
...

def frombytes(self, buffer: ReadableBuffer, /) -> None: ...
def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: ...
def fromlist(self, list: list[_T], /) -> None: ...
def fromunicode(self, ustr: str, /) -> None: ...
if sys.version_info >= (3, 10):
def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: ...
else:
def index(self, v: _T, /) -> int: ...  # type: ignore[override]

def insert(self, i: int, v: _T, /) -> None: ...
def pop(self, i: int = -1, /) -> _T: ...
def remove(self, v: _T, /) -> None: ...
def tobytes(self) -> bytes: ...
def tofile(self, f: SupportsWrite[bytes], /) -> None: ...
def tolist(self) -> list[_T]: ...
def tounicode(self) -> str: ...
if sys.version_info < (3, 9):
def fromstring(self, buffer: str | ReadableBuffer, /) -> None: ...
def tostring(self) -> bytes: ...

def __len__(self) -> int:
"""
Returns the number of items in the array, called as ``len(a)`` (where ``a`` is an ``array``).

**Note:** ``__len__`` cannot be called directly (``a.__len__()`` fails) and the
method is not present in ``__dict__``, however ``len(a)`` does work.
"""
...

@overload
def __getitem__(self, key: SupportsIndex, /) -> _T: ...
@overload
def __getitem__(self, key: slice, /) -> array[_T]: ...
@overload
def __getitem__(self, index: int) -> _T:
"""
Indexed read of the array, called as ``a[index]`` (where ``a`` is an ``array``).
Returns a value if *index* is an ``int`` and an ``array`` if *index* is a slice.
Negative indices count from the end and ``IndexError`` is thrown if the index is
out of range.

**Note:** ``__getitem__`` cannot be called directly (``a.__getitem__(index)`` fails) and
is not present in ``__dict__``, however ``a[index]`` does work.
"""

@overload
def __getitem__(self, sl: slice) -> array[_T]:
"""
Indexed read of the array, called as ``a[index]`` (where ``a`` is an ``array``).
Returns a value if *index* is an ``int`` and an ``array`` if *index* is a slice.
Negative indices count from the end and ``IndexError`` is thrown if the index is
out of range.

**Note:** ``__getitem__`` cannot be called directly (``a.__getitem__(index)`` fails) and
is not present in ``__dict__``, however ``a[index]`` does work.
"""

@overload  # type: ignore[override]
def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ...
@overload
def __setitem__(self, key: slice, value: array[_T], /) -> None: ...
@overload
def __setitem__(self, index: int, value: _T) -> None:
"""
Indexed write into the array, called as ``a[index] = value`` (where ``a`` is an ``array``).
``value`` is a single value if *index* is an ``int`` and an ``array`` if *index* is a slice.
Negative indices count from the end and ``IndexError`` is thrown if the index is out of range.

**Note:** ``__setitem__`` cannot be called directly (``a.__setitem__(index, value)`` fails) and
is not present in ``__dict__``, however ``a[index] = value`` does work.
"""

@overload
def __setitem__(self, sl: slice, values: array[_T]) -> None:
"""
Indexed write into the array, called as ``a[index] = value`` (where ``a`` is an ``array``).
``value`` is a single value if *index* is an ``int`` and an ``array`` if *index* is a slice.
Negative indices count from the end and ``IndexError`` is thrown if the index is out of range.

**Note:** ``__setitem__`` cannot be called directly (``a.__setitem__(index, value)`` fails) and
is not present in ``__dict__``, however ``a[index] = value`` does work.
"""

def __delitem__(self, key: SupportsIndex | slice, /) -> None: ...
def __add__(self, other: array[_T]) -> array[_T]:
"""
Return a new ``array`` that is the concatenation of the array with *other*, called as
``a + other`` (where ``a`` and *other* are both ``arrays``).

**Note:** ``__add__`` cannot be called directly (``a.__add__(other)`` fails) and
is not present in ``__dict__``, however ``a + other`` does work.
"""
...

def __eq__(self, value: object, /) -> bool: ...
def __ge__(self, value: array[_T], /) -> bool: ...
def __gt__(self, value: array[_T], /) -> bool: ...
def __iadd__(self, other: array[_T]) -> Self:
"""
Concatenates the array with *other* in-place, called as ``a += other`` (where ``a`` and *other*
are both ``arrays``). Equivalent to ``extend(other)``.

**Note:** ``__iadd__`` cannot be called directly (``a.__iadd__(other)`` fails) and
is not present in ``__dict__``, however ``a += other`` does work.
"""
...

def __imul__(self, value: int, /) -> Self: ...
def __le__(self, value: array[_T], /) -> bool: ...
def __lt__(self, value: array[_T], /) -> bool: ...
def __mul__(self, value: int, /) -> array[_T]: ...
def __rmul__(self, value: int, /) -> array[_T]: ...
def __copy__(self) -> array[_T]: ...
def __deepcopy__(self, unused: Any, /) -> array[_T]: ...
def __buffer__(self, flags: int, /) -> memoryview: ...
def __release_buffer__(self, buffer: memoryview, /) -> None: ...
if sys.version_info >= (3, 12):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...

# ArrayType = array
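# Illustrative sketch (not part of the stub): the typecode table and the __getitem__/
# __setitem__ docstrings above describe behaviour that can be exercised with the
# standard CPython array module (assumed available at runtime).
import array

a = array.array("i", [1, 2, 3])   # signed int array; itemsize is at least 2 bytes
print(a.typecode, a.itemsize)     # 'i' and the platform item size in bytes
print(len(a), a[0], a[-1])        # len(a) -> 3; indexed reads, negative index allowed
print(a[1:])                      # slicing returns another array: array('i', [2, 3])
a[0] = 10                         # indexed write
a.extend([4, 5])                  # grow in place; a += array.array("i", [6]) also works
print(a.tolist())                 # [10, 2, 3, 4, 5]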
@@ -0,0 +1,45 @@
import sys
from collections.abc import Awaitable, Coroutine, Generator
from typing import Any, TypeVar
from typing_extensions import TypeAlias

# As at runtime, this depends on all submodules defining __all__ accurately.
from .base_events import *
from .coroutines import *
from .events import *
from .exceptions import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .runners import *
from .streams import *

# from .subprocess import *
from .tasks import *
from .tasks import sleep_ms as sleep_ms
from .micropython import *

from .transports import *

if sys.version_info >= (3, 9):
from .threads import *

if sys.version_info >= (3, 11):
from .taskgroups import *
from .timeouts import *

if sys.platform == "win32":
from .windows_events import *
else:
from .unix_events import *

_T_co = TypeVar("_T_co", covariant=True)

# Aliases imported by multiple submodules in typeshed
if sys.version_info >= (3, 12):
_AwaitableLike: TypeAlias = Awaitable[_T_co]  # noqa: Y047
_CoroutineLike: TypeAlias = Coroutine[Any, Any, _T_co]  # noqa: Y047
else:
_AwaitableLike: TypeAlias = Generator[Any, None, _T_co] | Awaitable[_T_co]
_CoroutineLike: TypeAlias = Generator[Any, None, _T_co] | Coroutine[Any, Any, _T_co]
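# Illustrative sketch (not part of the stub): a plain "async def" coroutine satisfies the
# _CoroutineLike alias above and is what loop.create_task() accepts. Assumes standard
# CPython asyncio at runtime.
import asyncio

async def compute() -> int:
    await asyncio.sleep(0)        # yield to the event loop once
    return 42

async def main() -> None:
    task = asyncio.get_running_loop().create_task(compute())   # -> Task[int]
    print(await task)             # 42

asyncio.run(main())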
@@ -0,0 +1,482 @@
import ssl
import sys
from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer
from asyncio import _AwaitableLike, _CoroutineLike
from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory
from asyncio.futures import Future
from asyncio.protocols import BaseProtocol
from asyncio.tasks import Task
from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport
from collections.abc import Callable, Iterable, Sequence
from contextvars import Context  # type: ignore
from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket
from typing import IO, Any, Literal, TypeVar, overload
from typing_extensions import TypeAlias, TypeVarTuple, Unpack

if sys.version_info >= (3, 9):
__all__ = ("BaseEventLoop", "Server")
else:
__all__ = ("BaseEventLoop",)

_T = TypeVar("_T")
_Ts = TypeVarTuple("_Ts")
_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol)
_Context: TypeAlias = dict[str, Any]
_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object]
_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol]
_SSLContext: TypeAlias = bool | None | ssl.SSLContext

class Server(AbstractServer):
|
||||
if sys.version_info >= (3, 11):
|
||||
def __init__(
|
||||
self,
|
||||
loop: AbstractEventLoop,
|
||||
sockets: Iterable[socket],
|
||||
protocol_factory: _ProtocolFactory,
|
||||
ssl_context: _SSLContext,
|
||||
backlog: int,
|
||||
ssl_handshake_timeout: float | None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> None: ...
|
||||
else:
|
||||
def __init__(
|
||||
self,
|
||||
loop: AbstractEventLoop,
|
||||
sockets: Iterable[socket],
|
||||
protocol_factory: _ProtocolFactory,
|
||||
ssl_context: _SSLContext,
|
||||
backlog: int,
|
||||
ssl_handshake_timeout: float | None,
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
def close_clients(self) -> None: ...
|
||||
def abort_clients(self) -> None: ...
|
||||
|
||||
def get_loop(self) -> AbstractEventLoop: ...
|
||||
def is_serving(self) -> bool: ...
|
||||
async def start_serving(self) -> None: ...
|
||||
async def serve_forever(self) -> None: ...
|
||||
@property
|
||||
def sockets(self) -> tuple[socket, ...]: ...
|
||||
def close(self) -> None: ...
|
||||
async def wait_closed(self) -> None: ...
|
||||
|
||||
class BaseEventLoop(AbstractEventLoop):
|
||||
def run_forever(self) -> None: ...
|
||||
def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ...
|
||||
def stop(self) -> None: ...
|
||||
def is_running(self) -> bool: ...
|
||||
def is_closed(self) -> bool: ...
|
||||
def close(self) -> None: ...
|
||||
async def shutdown_asyncgens(self) -> None: ...
|
||||
# Methods scheduling callbacks. All these return Handles.
|
||||
def call_soon(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None) -> Handle: ... # type: ignore
|
||||
def call_later(
|
||||
self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None # type: ignore
|
||||
) -> TimerHandle: ...
|
||||
def call_at(
|
||||
self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None # type: ignore
|
||||
) -> TimerHandle: ...
|
||||
def time(self) -> float: ...
|
||||
# Future methods
|
||||
def create_future(self) -> Future[Any]: ...
|
||||
# Tasks methods
|
||||
if sys.version_info >= (3, 11):
|
||||
def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: ... # type: ignore
|
||||
else:
|
||||
def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ...
|
||||
|
||||
def set_task_factory(self, factory: _TaskFactory | None) -> None: ...
|
||||
def get_task_factory(self) -> _TaskFactory | None: ...
|
||||
# Methods for interacting with threads
|
||||
def call_soon_threadsafe(
|
||||
self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None # type: ignore
|
||||
) -> Handle: ...
|
||||
def run_in_executor(self, executor: Any, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ...
|
||||
def set_default_executor(self, executor: Any) -> None: ...
|
||||
# Network I/O methods returning Futures.
|
||||
async def getaddrinfo(
|
||||
self,
|
||||
host: bytes | str | None,
|
||||
port: bytes | str | int | None,
|
||||
*,
|
||||
family: int = 0,
|
||||
type: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ...
|
||||
async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
@overload
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: str = ...,
|
||||
port: int = ...,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: None = None,
|
||||
local_addr: tuple[str, int] | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
all_errors: bool = False,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
@overload
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: socket,
|
||||
local_addr: None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
all_errors: bool = False,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
@overload
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: str = ...,
|
||||
port: int = ...,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: None = None,
|
||||
local_addr: tuple[str, int] | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
@overload
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: socket,
|
||||
local_addr: None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
else:
|
||||
@overload
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: str = ...,
|
||||
port: int = ...,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: None = None,
|
||||
local_addr: tuple[str, int] | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
@overload
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: socket,
|
||||
local_addr: None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
# 3.13 added `keep_alive`.
|
||||
@overload
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int = ...,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
keep_alive: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
@overload
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: socket = ...,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
keep_alive: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
@overload
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int = ...,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
@overload
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: socket = ...,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
else:
|
||||
@overload
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int = ...,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
@overload
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: socket = ...,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
async def start_tls(
|
||||
self,
|
||||
transport: BaseTransport,
|
||||
protocol: BaseProtocol,
|
||||
sslcontext: ssl.SSLContext,
|
||||
*,
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> Transport | None: ...
|
||||
async def connect_accepted_socket(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
sock: socket,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
else:
|
||||
async def start_tls(
|
||||
self,
|
||||
transport: BaseTransport,
|
||||
protocol: BaseProtocol,
|
||||
sslcontext: ssl.SSLContext,
|
||||
*,
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
) -> Transport | None: ...
|
||||
async def connect_accepted_socket(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
sock: socket,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
|
||||
async def sock_sendfile(
|
||||
self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True
|
||||
) -> int: ...
|
||||
async def sendfile(
|
||||
self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True
|
||||
) -> int: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
async def create_datagram_endpoint( # type: ignore[override]
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
local_addr: tuple[str, int] | str | None = None,
|
||||
remote_addr: tuple[str, int] | str | None = None,
|
||||
*,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
reuse_port: bool | None = None,
|
||||
allow_broadcast: bool | None = None,
|
||||
sock: socket | None = None,
|
||||
) -> tuple[DatagramTransport, _ProtocolT]: ...
|
||||
else:
|
||||
async def create_datagram_endpoint(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
local_addr: tuple[str, int] | str | None = None,
|
||||
remote_addr: tuple[str, int] | str | None = None,
|
||||
*,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
reuse_address: bool | None = ...,
|
||||
reuse_port: bool | None = None,
|
||||
allow_broadcast: bool | None = None,
|
||||
sock: socket | None = None,
|
||||
) -> tuple[DatagramTransport, _ProtocolT]: ...
|
||||
# Pipes and subprocesses.
|
||||
async def connect_read_pipe(self, protocol_factory: Callable[[], _ProtocolT], pipe: Any) -> tuple[ReadTransport, _ProtocolT]: ...
|
||||
async def connect_write_pipe(self, protocol_factory: Callable[[], _ProtocolT], pipe: Any) -> tuple[WriteTransport, _ProtocolT]: ...
|
||||
async def subprocess_shell(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
cmd: bytes | str,
|
||||
*,
|
||||
stdin: int | IO[Any] | None = -1,
|
||||
stdout: int | IO[Any] | None = -1,
|
||||
stderr: int | IO[Any] | None = -1,
|
||||
universal_newlines: Literal[False] = False,
|
||||
shell: Literal[True] = True,
|
||||
bufsize: Literal[0] = 0,
|
||||
encoding: None = None,
|
||||
errors: None = None,
|
||||
text: Literal[False] | None = None,
|
||||
**kwargs: Any,
|
||||
) -> tuple[SubprocessTransport, _ProtocolT]: ...
|
||||
async def subprocess_exec(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
program: Any,
|
||||
*args: Any,
|
||||
stdin: int | IO[Any] | None = -1,
|
||||
stdout: int | IO[Any] | None = -1,
|
||||
stderr: int | IO[Any] | None = -1,
|
||||
universal_newlines: Literal[False] = False,
|
||||
shell: Literal[False] = False,
|
||||
bufsize: Literal[0] = 0,
|
||||
encoding: None = None,
|
||||
errors: None = None,
|
||||
**kwargs: Any,
|
||||
) -> tuple[SubprocessTransport, _ProtocolT]: ...
|
||||
def add_reader(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ...
|
||||
def remove_reader(self, fd: FileDescriptorLike) -> bool: ...
|
||||
def add_writer(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ...
|
||||
def remove_writer(self, fd: FileDescriptorLike) -> bool: ...
|
||||
# The sock_* methods (and probably some others) are not actually implemented on
|
||||
# BaseEventLoop, only on subclasses. We list them here for now for convenience.
|
||||
async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ...
|
||||
async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ...
|
||||
async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ...
|
||||
async def sock_connect(self, sock: socket, address: _Address) -> None: ...
|
||||
async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ...
|
||||
async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> tuple[int, _RetAddress]: ...
|
||||
async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> int: ...
|
||||
# Signal handling.
|
||||
def add_signal_handler(self, sig: int, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ...
|
||||
def remove_signal_handler(self, sig: int) -> bool: ...
|
||||
# Error handlers.
|
||||
def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ...
|
||||
def get_exception_handler(self) -> _ExceptionHandler | None: ...
|
||||
def default_exception_handler(self, context: _Context) -> None: ...
|
||||
def call_exception_handler(self, context: _Context) -> None: ...
|
||||
# Debug flag management.
|
||||
def get_debug(self) -> bool: ...
|
||||
def set_debug(self, enabled: bool) -> None: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
async def shutdown_default_executor(self, timeout: float | None = None) -> None: ...
|
||||
elif sys.version_info >= (3, 9):
|
||||
async def shutdown_default_executor(self) -> None: ...
|
||||
|
||||
def __del__(self) -> None: ...
|
||||
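# Illustrative sketch (not part of the stub): the call_soon()/call_later() scheduling
# methods annotated above return Handle/TimerHandle and fire once the loop runs.
# Assumes standard CPython asyncio at runtime.
import asyncio

loop = asyncio.new_event_loop()
try:
    handle = loop.call_soon(print, "call_soon fired")          # -> Handle
    timer = loop.call_later(0.01, print, "call_later fired")   # -> TimerHandle
    loop.run_until_complete(asyncio.sleep(0.05))               # let both callbacks run
finally:
    loop.close()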
@@ -0,0 +1,19 @@
from collections.abc import Callable, Sequence
from contextvars import Context  # type: ignore
from typing import Any, Final

from . import futures

__all__ = ()

# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py
# but it leads to circular import error in pytype tool.
# That's why the import order is reversed.
from .futures import isfuture as isfuture

_PENDING: Final = "PENDING"  # undocumented
_CANCELLED: Final = "CANCELLED"  # undocumented
_FINISHED: Final = "FINISHED"  # undocumented

def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ...  # undocumented
def _future_repr_info(future: futures.Future[Any]) -> list[str]: ...  # undocumented
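# Illustrative sketch (not part of the stub): isfuture(), re-exported above from .futures,
# is true for Future objects and for Task instances (Task subclasses Future), but not for
# plain values or coroutines. Assumes standard CPython asyncio at runtime.
import asyncio

async def demo() -> None:
    fut = asyncio.get_running_loop().create_future()
    task = asyncio.create_task(asyncio.sleep(0))
    print(asyncio.isfuture(fut), asyncio.isfuture(task))   # True True
    print(asyncio.isfuture(42))                            # False
    fut.set_result(None)
    await task

asyncio.run(demo())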
@@ -0,0 +1,9 @@
from _typeshed import StrOrBytesPath
from types import FrameType
from typing import Any

from . import tasks

def _task_repr_info(task: tasks.Task[Any]) -> list[str]: ...  # undocumented
def _task_get_stack(task: tasks.Task[Any], limit: int | None) -> list[FrameType]: ...  # undocumented
def _task_print_stack(task: tasks.Task[Any], limit: int | None, file: StrOrBytesPath) -> None: ...  # undocumented
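# Illustrative sketch (not part of the stub): the private helpers above back the public
# Task.get_stack()/Task.print_stack() API. Assumes standard CPython asyncio at runtime.
import asyncio

async def worker() -> None:
    await asyncio.sleep(10)

async def main() -> None:
    task = asyncio.create_task(worker())
    await asyncio.sleep(0)              # let worker() start and suspend
    print(task.get_stack(limit=1))      # frame currently suspended in worker()
    task.print_stack(limit=1)
    task.cancel()

asyncio.run(main())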
@@ -0,0 +1,20 @@
import enum
import sys
from typing import Final

LOG_THRESHOLD_FOR_CONNLOST_WRITES: Final = 5
ACCEPT_RETRY_DELAY: Final = 1
DEBUG_STACK_DEPTH: Final = 10
SSL_HANDSHAKE_TIMEOUT: float
SENDFILE_FALLBACK_READBUFFER_SIZE: Final = 262144
if sys.version_info >= (3, 11):
SSL_SHUTDOWN_TIMEOUT: float
FLOW_CONTROL_HIGH_WATER_SSL_READ: Final = 256
FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Final = 512
if sys.version_info >= (3, 12):
THREAD_JOIN_TIMEOUT: Final = 300

class _SendfileMode(enum.Enum):
UNSUPPORTED = 1
TRY_NATIVE = 2
FALLBACK = 3
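# Illustrative sketch (not part of the stub): the constants above are plain runtime values
# in CPython's asyncio.constants module, e.g. the sendfile fallback buffer size and the
# _SendfileMode enum members.
from asyncio import constants

print(constants.SENDFILE_FALLBACK_READBUFFER_SIZE)      # 262144 (256 KiB)
print([mode.name for mode in constants._SendfileMode])  # ['UNSUPPORTED', 'TRY_NATIVE', 'FALLBACK']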
@@ -0,0 +1,26 @@
import sys
from collections.abc import Awaitable, Callable, Coroutine
from typing import Any, TypeVar, overload
from typing_extensions import ParamSpec, TypeGuard, TypeIs

if sys.version_info >= (3, 11):
__all__ = ("iscoroutinefunction", "iscoroutine")
else:
__all__ = ("coroutine", "iscoroutinefunction", "iscoroutine")

_T = TypeVar("_T")
_FunctionT = TypeVar("_FunctionT", bound=Callable[..., Any])
_P = ParamSpec("_P")

if sys.version_info < (3, 11):
def coroutine(func: _FunctionT) -> _FunctionT: ...

@overload
def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ...
@overload
def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ...
@overload
def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ...
@overload
def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ...
def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: ...
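# Illustrative sketch (not part of the stub): how the iscoroutinefunction()/iscoroutine()
# overloads above behave at runtime, assuming standard CPython asyncio.
import asyncio

async def fetch() -> int:
    return 1

print(asyncio.iscoroutinefunction(fetch))   # True: an "async def" function
print(asyncio.iscoroutine(fetch))           # False: the function itself is not a coroutine object
coro = fetch()
print(asyncio.iscoroutine(coro))            # True: calling it produced a coroutine object
coro.close()                                # close it to avoid a "never awaited" warning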
645
.venv/lib/python3.12/site-packages/stdlib/asyncio/events.pyi
Normal file
645
.venv/lib/python3.12/site-packages/stdlib/asyncio/events.pyi
Normal file
@@ -0,0 +1,645 @@
|
||||
import ssl
|
||||
import sys
|
||||
from _asyncio import (
|
||||
_get_running_loop as _get_running_loop,
|
||||
_set_running_loop as _set_running_loop,
|
||||
get_event_loop as get_event_loop,
|
||||
get_running_loop as get_running_loop,
|
||||
)
|
||||
from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from collections.abc import Callable, Sequence
|
||||
from contextvars import Context # type: ignore
|
||||
from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket
|
||||
from typing import IO, Any, Literal, Protocol, TypeVar, overload
|
||||
from typing_extensions import Self, TypeAlias, TypeVarTuple, Unpack, deprecated
|
||||
|
||||
from . import _AwaitableLike, _CoroutineLike
|
||||
from .base_events import Server
|
||||
from .futures import Future
|
||||
from .protocols import BaseProtocol
|
||||
from .tasks import Task
|
||||
from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport
|
||||
from .unix_events import AbstractChildWatcher
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
__all__ = (
|
||||
"AbstractEventLoopPolicy",
|
||||
"AbstractEventLoop",
|
||||
"AbstractServer",
|
||||
"Handle",
|
||||
"TimerHandle",
|
||||
"get_event_loop_policy",
|
||||
"set_event_loop_policy",
|
||||
"get_event_loop",
|
||||
"set_event_loop",
|
||||
"new_event_loop",
|
||||
"_set_running_loop",
|
||||
"get_running_loop",
|
||||
"_get_running_loop",
|
||||
)
|
||||
else:
|
||||
__all__ = (
|
||||
"AbstractEventLoopPolicy",
|
||||
"AbstractEventLoop",
|
||||
"AbstractServer",
|
||||
"Handle",
|
||||
"TimerHandle",
|
||||
"get_event_loop_policy",
|
||||
"set_event_loop_policy",
|
||||
"get_event_loop",
|
||||
"set_event_loop",
|
||||
"new_event_loop",
|
||||
"get_child_watcher",
|
||||
"set_child_watcher",
|
||||
"_set_running_loop",
|
||||
"get_running_loop",
|
||||
"_get_running_loop",
|
||||
)
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_Ts = TypeVarTuple("_Ts")
|
||||
_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol)
|
||||
_Context: TypeAlias = dict[str, Any]
|
||||
_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object]
|
||||
_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol]
|
||||
_SSLContext: TypeAlias = bool | None | ssl.SSLContext
|
||||
|
||||
class _TaskFactory(Protocol):
|
||||
def __call__(self, loop: AbstractEventLoop, factory: _CoroutineLike[_T], /) -> Future[_T]: ...
|
||||
|
||||
class Handle:
|
||||
_cancelled: bool
|
||||
_args: Sequence[Any]
|
||||
def __init__(
|
||||
self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = None # type: ignore
|
||||
) -> None: ...
|
||||
def cancel(self) -> None: ...
|
||||
def _run(self) -> None: ...
|
||||
def cancelled(self) -> bool: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
def get_context(self) -> Context: ...
|
||||
|
||||
class TimerHandle(Handle):
|
||||
def __init__(
|
||||
self,
|
||||
when: float,
|
||||
callback: Callable[..., object],
|
||||
args: Sequence[Any],
|
||||
loop: AbstractEventLoop,
|
||||
context: Context | None = None, # type: ignore
|
||||
) -> None: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def when(self) -> float: ...
|
||||
def __lt__(self, other: TimerHandle) -> bool: ...
|
||||
def __le__(self, other: TimerHandle) -> bool: ...
|
||||
def __gt__(self, other: TimerHandle) -> bool: ...
|
||||
def __ge__(self, other: TimerHandle) -> bool: ...
|
||||
def __eq__(self, other: object) -> bool: ...
|
||||
|
||||
class AbstractServer:
|
||||
@abstractmethod
|
||||
def close(self) -> None: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
@abstractmethod
|
||||
def close_clients(self) -> None: ...
|
||||
@abstractmethod
|
||||
def abort_clients(self) -> None: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
async def __aexit__(self, *exc: Unused) -> None: ...
|
||||
@abstractmethod
|
||||
def get_loop(self) -> AbstractEventLoop: ...
|
||||
@abstractmethod
|
||||
def is_serving(self) -> bool: ...
|
||||
@abstractmethod
|
||||
async def start_serving(self) -> None: ...
|
||||
@abstractmethod
|
||||
async def serve_forever(self) -> None: ...
|
||||
@abstractmethod
|
||||
async def wait_closed(self) -> None: ...
|
||||
|
||||
class AbstractEventLoop:
|
||||
slow_callback_duration: float
|
||||
@abstractmethod
|
||||
def run_forever(self) -> None: ...
|
||||
@abstractmethod
|
||||
def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ...
|
||||
@abstractmethod
|
||||
def stop(self) -> None: ...
|
||||
@abstractmethod
|
||||
def is_running(self) -> bool: ...
|
||||
@abstractmethod
|
||||
def is_closed(self) -> bool: ...
|
||||
@abstractmethod
|
||||
def close(self) -> None: ...
|
||||
@abstractmethod
|
||||
async def shutdown_asyncgens(self) -> None: ...
|
||||
# Methods scheduling callbacks. All these return Handles.
|
||||
if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2
|
||||
@abstractmethod
|
||||
def call_soon(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None) -> Handle: ... # type: ignore
|
||||
@abstractmethod
|
||||
def call_later(
|
||||
self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None # type: ignore
|
||||
) -> TimerHandle: ...
|
||||
@abstractmethod
|
||||
def call_at(
|
||||
self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None # type: ignore
|
||||
) -> TimerHandle: ...
|
||||
else:
|
||||
@abstractmethod
|
||||
def call_soon(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> Handle: ...
|
||||
@abstractmethod
|
||||
def call_later(self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> TimerHandle: ...
|
||||
@abstractmethod
|
||||
def call_at(self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> TimerHandle: ...
|
||||
|
||||
@abstractmethod
|
||||
def time(self) -> float: ...
|
||||
# Future methods
|
||||
@abstractmethod
|
||||
def create_future(self) -> Future[Any]: ...
|
||||
# Tasks methods
|
||||
if sys.version_info >= (3, 11):
|
||||
@abstractmethod
|
||||
def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... # type: ignore
|
||||
else:
|
||||
@abstractmethod
|
||||
def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ...
|
||||
|
||||
@abstractmethod
|
||||
def set_task_factory(self, factory: _TaskFactory | None) -> None: ...
|
||||
@abstractmethod
|
||||
def get_task_factory(self) -> _TaskFactory | None: ...
|
||||
# Methods for interacting with threads
|
||||
if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2
|
||||
@abstractmethod
|
||||
def call_soon_threadsafe(
|
||||
self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None # type: ignore
|
||||
) -> Handle: ...
|
||||
else:
|
||||
@abstractmethod
|
||||
def call_soon_threadsafe(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> Handle: ...
|
||||
|
||||
@abstractmethod
|
||||
def run_in_executor(self, executor: Any, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ...
|
||||
@abstractmethod
|
||||
def set_default_executor(self, executor: Any) -> None: ...
|
||||
# Network I/O methods returning Futures.
|
||||
@abstractmethod
|
||||
async def getaddrinfo(
|
||||
self,
|
||||
host: bytes | str | None,
|
||||
port: bytes | str | int | None,
|
||||
*,
|
||||
family: int = 0,
|
||||
type: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ...
|
||||
@abstractmethod
|
||||
async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: str = ...,
|
||||
port: int = ...,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: None = None,
|
||||
local_addr: tuple[str, int] | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: socket,
|
||||
local_addr: None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
else:
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: str = ...,
|
||||
port: int = ...,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: None = None,
|
||||
local_addr: tuple[str, int] | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
sock: socket,
|
||||
local_addr: None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
happy_eyeballs_delay: float | None = None,
|
||||
interleave: int | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
# 3.13 added `keep_alive`.
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int = ...,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
keep_alive: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: socket = ...,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
keep_alive: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int = ...,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: socket = ...,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
else:
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int = ...,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
@overload
|
||||
@abstractmethod
|
||||
async def create_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
host: None = None,
|
||||
port: None = None,
|
||||
*,
|
||||
family: int = ...,
|
||||
flags: int = ...,
|
||||
sock: socket = ...,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
@abstractmethod
|
||||
async def start_tls(
|
||||
self,
|
||||
transport: WriteTransport,
|
||||
protocol: BaseProtocol,
|
||||
sslcontext: ssl.SSLContext,
|
||||
*,
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> Transport | None: ...
|
||||
async def create_unix_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
path: StrPath | None = None,
|
||||
*,
|
||||
sock: socket | None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
else:
|
||||
@abstractmethod
|
||||
async def start_tls(
|
||||
self,
|
||||
transport: BaseTransport,
|
||||
protocol: BaseProtocol,
|
||||
sslcontext: ssl.SSLContext,
|
||||
*,
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
) -> Transport | None: ...
|
||||
async def create_unix_server(
|
||||
self,
|
||||
protocol_factory: _ProtocolFactory,
|
||||
path: StrPath | None = None,
|
||||
*,
|
||||
sock: socket | None = None,
|
||||
backlog: int = 100,
|
||||
ssl: _SSLContext = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
start_serving: bool = True,
|
||||
) -> Server: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
async def connect_accepted_socket(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
sock: socket,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
elif sys.version_info >= (3, 10):
|
||||
async def connect_accepted_socket(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
sock: socket,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
async def create_unix_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
path: str | None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
sock: socket | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
else:
|
||||
async def create_unix_connection(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
path: str | None = None,
|
||||
*,
|
||||
ssl: _SSLContext = None,
|
||||
sock: socket | None = None,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
) -> tuple[Transport, _ProtocolT]: ...
|
||||
|
||||
@abstractmethod
|
||||
async def sock_sendfile(
|
||||
self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = None
|
||||
) -> int: ...
|
||||
@abstractmethod
|
||||
async def sendfile(
|
||||
self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True
|
||||
) -> int: ...
|
||||
@abstractmethod
|
||||
async def create_datagram_endpoint(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
local_addr: tuple[str, int] | str | None = None,
|
||||
remote_addr: tuple[str, int] | str | None = None,
|
||||
*,
|
||||
family: int = 0,
|
||||
proto: int = 0,
|
||||
flags: int = 0,
|
||||
reuse_address: bool | None = None,
|
||||
reuse_port: bool | None = None,
|
||||
allow_broadcast: bool | None = None,
|
||||
sock: socket | None = None,
|
||||
) -> tuple[DatagramTransport, _ProtocolT]: ...
|
||||
# Pipes and subprocesses.
|
||||
@abstractmethod
|
||||
async def connect_read_pipe(self, protocol_factory: Callable[[], _ProtocolT], pipe: Any) -> tuple[ReadTransport, _ProtocolT]: ...
|
||||
@abstractmethod
|
||||
async def connect_write_pipe(self, protocol_factory: Callable[[], _ProtocolT], pipe: Any) -> tuple[WriteTransport, _ProtocolT]: ...
|
||||
@abstractmethod
|
||||
async def subprocess_shell(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
cmd: bytes | str,
|
||||
*,
|
||||
stdin: int | IO[Any] | None = -1,
|
||||
stdout: int | IO[Any] | None = -1,
|
||||
stderr: int | IO[Any] | None = -1,
|
||||
universal_newlines: Literal[False] = False,
|
||||
shell: Literal[True] = True,
|
||||
bufsize: Literal[0] = 0,
|
||||
encoding: None = None,
|
||||
errors: None = None,
|
||||
text: Literal[False] | None = ...,
|
||||
**kwargs: Any,
|
||||
) -> tuple[SubprocessTransport, _ProtocolT]: ...
|
||||
@abstractmethod
|
||||
async def subprocess_exec(
|
||||
self,
|
||||
protocol_factory: Callable[[], _ProtocolT],
|
||||
program: Any,
|
||||
*args: Any,
|
||||
stdin: int | IO[Any] | None = -1,
|
||||
stdout: int | IO[Any] | None = -1,
|
||||
stderr: int | IO[Any] | None = -1,
|
||||
universal_newlines: Literal[False] = False,
|
||||
shell: Literal[False] = False,
|
||||
bufsize: Literal[0] = 0,
|
||||
encoding: None = None,
|
||||
errors: None = None,
|
||||
**kwargs: Any,
|
||||
) -> tuple[SubprocessTransport, _ProtocolT]: ...
|
||||
@abstractmethod
|
||||
def add_reader(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ...
|
||||
@abstractmethod
|
||||
def remove_reader(self, fd: FileDescriptorLike) -> bool: ...
|
||||
@abstractmethod
|
||||
def add_writer(self, fd: FileDescriptorLike, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ...
|
||||
@abstractmethod
|
||||
def remove_writer(self, fd: FileDescriptorLike) -> bool: ...
|
||||
@abstractmethod
|
||||
async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ...
|
||||
@abstractmethod
|
||||
async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ...
|
||||
@abstractmethod
|
||||
async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ...
|
||||
@abstractmethod
|
||||
async def sock_connect(self, sock: socket, address: _Address) -> None: ...
|
||||
@abstractmethod
|
||||
async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
@abstractmethod
|
||||
async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ...
|
||||
@abstractmethod
|
||||
async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> tuple[int, _RetAddress]: ...
|
||||
@abstractmethod
|
||||
async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> int: ...
|
||||
# Signal handling.
|
||||
@abstractmethod
|
||||
def add_signal_handler(self, sig: int, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> None: ...
|
||||
@abstractmethod
|
||||
def remove_signal_handler(self, sig: int) -> bool: ...
|
||||
# Error handlers.
|
||||
@abstractmethod
|
||||
def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ...
|
||||
@abstractmethod
|
||||
def get_exception_handler(self) -> _ExceptionHandler | None: ...
|
||||
@abstractmethod
|
||||
def default_exception_handler(self, context: _Context) -> None: ...
|
||||
@abstractmethod
|
||||
def call_exception_handler(self, context: _Context) -> None: ...
|
||||
# Debug flag management.
|
||||
@abstractmethod
|
||||
def get_debug(self) -> bool: ...
|
||||
@abstractmethod
|
||||
def set_debug(self, enabled: bool) -> None: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
@abstractmethod
|
||||
async def shutdown_default_executor(self) -> None: ...
|
||||
|
||||
class AbstractEventLoopPolicy:
|
||||
@abstractmethod
|
||||
def get_event_loop(self) -> AbstractEventLoop: ...
|
||||
@abstractmethod
|
||||
def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
@abstractmethod
|
||||
def new_event_loop(self) -> AbstractEventLoop: ...
|
||||
# Child processes handling (Unix only).
|
||||
if sys.version_info < (3, 14):
|
||||
if sys.version_info >= (3, 12):
|
||||
@abstractmethod
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
def get_child_watcher(self) -> AbstractChildWatcher: ...
|
||||
@abstractmethod
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ...
|
||||
else:
|
||||
@abstractmethod
|
||||
def get_child_watcher(self) -> AbstractChildWatcher: ...
|
||||
@abstractmethod
|
||||
def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ...
|
||||
|
||||
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta):
|
||||
def get_event_loop(self) -> AbstractEventLoop: ...
|
||||
def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ...
|
||||
def new_event_loop(self) -> AbstractEventLoop: ...
|
||||
|
||||
def get_event_loop_policy() -> AbstractEventLoopPolicy: ...
|
||||
def set_event_loop_policy(policy: AbstractEventLoopPolicy | None) -> None: ...
|
||||
def set_event_loop(loop: AbstractEventLoop | None) -> None: ...
|
||||
def new_event_loop() -> AbstractEventLoop: ...
|
||||
|
||||
if sys.version_info < (3, 14):
|
||||
if sys.version_info >= (3, 12):
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
def get_child_watcher() -> AbstractChildWatcher: ...
|
||||
@deprecated("Deprecated as of Python 3.12; will be removed in Python 3.14")
|
||||
def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
|
||||
|
||||
else:
|
||||
def get_child_watcher() -> AbstractChildWatcher: ...
|
||||
def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
|
||||
@@ -0,0 +1,43 @@
import sys

if sys.version_info >= (3, 11):
    __all__ = (
        "BrokenBarrierError",
        "CancelledError",
        "InvalidStateError",
        "TimeoutError",
        "IncompleteReadError",
        "LimitOverrunError",
        "SendfileNotAvailableError",
    )
else:
    __all__ = (
        "CancelledError",
        "InvalidStateError",
        "TimeoutError",
        "IncompleteReadError",
        "LimitOverrunError",
        "SendfileNotAvailableError",
    )

class CancelledError(BaseException): ...

if sys.version_info >= (3, 11):
    from builtins import TimeoutError as TimeoutError
else:
    class TimeoutError(Exception): ...

class InvalidStateError(Exception): ...
class SendfileNotAvailableError(RuntimeError): ...

class IncompleteReadError(EOFError):
    expected: int | None
    partial: bytes
    def __init__(self, partial: bytes, expected: int | None) -> None: ...

class LimitOverrunError(Exception):
    consumed: int
    def __init__(self, message: str, consumed: int) -> None: ...

if sys.version_info >= (3, 11):
    class BrokenBarrierError(RuntimeError): ...
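
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the stub above: the version gate encodes
# that on Python >= 3.11 asyncio.TimeoutError is an alias of the builtin
# TimeoutError, so either name can be caught.  Shown with CPython semantics.
import asyncio

async def slow() -> str:
    await asyncio.sleep(10)
    return "done"

async def main() -> None:
    try:
        await asyncio.wait_for(slow(), timeout=0.1)
    except asyncio.TimeoutError:  # same exception as builtin TimeoutError on 3.11+
        print("timed out")

asyncio.run(main())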
@@ -0,0 +1,29 @@
import functools
import sys
import traceback
from collections.abc import Iterable
from types import FrameType, FunctionType
from typing import Any, overload
from typing_extensions import TypeAlias

class _HasWrapper:
    __wrapper__: _HasWrapper | FunctionType

_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any]

@overload
def _get_function_source(func: _FuncType) -> tuple[str, int]: ...
@overload
def _get_function_source(func: object) -> tuple[str, int] | None: ...

if sys.version_info >= (3, 13):
    def _format_callback_source(func: object, args: Iterable[Any], *, debug: bool = False) -> str: ...
    def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: ...
    def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = "") -> str: ...

else:
    def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
    def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
    def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...

def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ...
@@ -0,0 +1,16 @@
from _asyncio import Future as Future
from concurrent.futures._base import Future as _ConcurrentFuture
from typing import Any, TypeVar
from typing_extensions import TypeIs

from .events import AbstractEventLoop

__all__ = ("Future", "wrap_future", "isfuture")

_T = TypeVar("_T")

# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py,
# but that leads to a circular import error in the pytype tool.
# That's why the import order is reversed here.
def isfuture(obj: object) -> TypeIs[Future[Any]]: ...
def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ...
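
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the stub above (CPython-oriented, since
# MicroPython has no concurrent.futures): wrap_future() turns a
# concurrent.futures.Future into an awaitable asyncio Future.
import asyncio
import concurrent.futures

def blocking_work() -> int:
    return 42

async def main() -> None:
    with concurrent.futures.ThreadPoolExecutor() as pool:
        cf = pool.submit(blocking_work)       # a concurrent.futures.Future
        print(await asyncio.wrap_future(cf))  # awaited as an asyncio Future

asyncio.run(main())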
117
.venv/lib/python3.12/site-packages/stdlib/asyncio/locks.pyi
Normal file
@@ -0,0 +1,117 @@
|
||||
import enum
|
||||
import sys
|
||||
from _typeshed import Unused
|
||||
from collections import deque
|
||||
from collections.abc import Callable, Generator
|
||||
from types import TracebackType
|
||||
from typing import Any, Literal, TypeVar
|
||||
from typing_extensions import Self
|
||||
|
||||
from .events import AbstractEventLoop
|
||||
from .futures import Future
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from .mixins import _LoopBoundMixin
|
||||
else:
|
||||
_LoopBoundMixin = object
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
__all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore", "Barrier")
|
||||
else:
|
||||
__all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore")
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
class _ContextManagerMixin:
|
||||
async def __aenter__(self) -> None: ...
|
||||
async def __aexit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ...
|
||||
|
||||
else:
|
||||
class _ContextManager:
|
||||
def __init__(self, lock: Lock | Semaphore) -> None: ...
|
||||
def __enter__(self) -> None: ...
|
||||
def __exit__(self, *args: Unused) -> None: ...
|
||||
|
||||
class _ContextManagerMixin:
|
||||
# Apparently this exists to *prohibit* use as a context manager.
|
||||
# def __enter__(self) -> NoReturn: ... see: https://github.com/python/typing/issues/1043
|
||||
# def __exit__(self, *args: Any) -> None: ...
|
||||
def __iter__(self) -> Generator[Any, None, _ContextManager]: ...
|
||||
def __await__(self) -> Generator[Any, None, _ContextManager]: ...
|
||||
async def __aenter__(self) -> None: ...
|
||||
async def __aexit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ...
|
||||
|
||||
class Lock(_ContextManagerMixin, _LoopBoundMixin):
|
||||
_waiters: deque[Future[Any]] | None
|
||||
if sys.version_info >= (3, 10):
|
||||
def __init__(self) -> None: ...
|
||||
else:
|
||||
def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
|
||||
def locked(self) -> bool: ...
|
||||
async def acquire(self) -> Literal[True]: ...
|
||||
def release(self) -> None: ...
|
||||
|
||||
class Event(_LoopBoundMixin):
|
||||
_waiters: deque[Future[Any]]
|
||||
if sys.version_info >= (3, 10):
|
||||
def __init__(self) -> None: ...
|
||||
else:
|
||||
def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
|
||||
def is_set(self) -> bool: ...
|
||||
def set(self) -> None: ...
|
||||
def clear(self) -> None: ...
|
||||
async def wait(self) -> Literal[True]: ...
|
||||
|
||||
class Condition(_ContextManagerMixin, _LoopBoundMixin):
|
||||
_waiters: deque[Future[Any]]
|
||||
if sys.version_info >= (3, 10):
|
||||
def __init__(self, lock: Lock | None = None) -> None: ...
|
||||
else:
|
||||
def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
|
||||
def locked(self) -> bool: ...
|
||||
async def acquire(self) -> Literal[True]: ...
|
||||
def release(self) -> None: ...
|
||||
async def wait(self) -> Literal[True]: ...
|
||||
async def wait_for(self, predicate: Callable[[], _T]) -> _T: ...
|
||||
def notify(self, n: int = 1) -> None: ...
|
||||
def notify_all(self) -> None: ...
|
||||
|
||||
class Semaphore(_ContextManagerMixin, _LoopBoundMixin):
|
||||
_value: int
|
||||
_waiters: deque[Future[Any]] | None
|
||||
if sys.version_info >= (3, 10):
|
||||
def __init__(self, value: int = 1) -> None: ...
|
||||
else:
|
||||
def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
|
||||
def locked(self) -> bool: ...
|
||||
async def acquire(self) -> Literal[True]: ...
|
||||
def release(self) -> None: ...
|
||||
def _wake_up_next(self) -> None: ...
|
||||
|
||||
class BoundedSemaphore(Semaphore): ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class _BarrierState(enum.Enum): # undocumented
|
||||
FILLING = "filling"
|
||||
DRAINING = "draining"
|
||||
RESETTING = "resetting"
|
||||
BROKEN = "broken"
|
||||
|
||||
class Barrier(_LoopBoundMixin):
|
||||
def __init__(self, parties: int) -> None: ...
|
||||
async def __aenter__(self) -> Self: ...
|
||||
async def __aexit__(self, *args: Unused) -> None: ...
|
||||
async def wait(self) -> int: ...
|
||||
async def abort(self) -> None: ...
|
||||
async def reset(self) -> None: ...
|
||||
@property
|
||||
def parties(self) -> int: ...
|
||||
@property
|
||||
def n_waiting(self) -> int: ...
|
||||
@property
|
||||
def broken(self) -> bool: ...
|
||||
@@ -0,0 +1,3 @@
import logging

logger: logging.Logger
@@ -0,0 +1,39 @@
from _typeshed import Incomplete
from collections.abc import Generator

class ThreadSafeFlag:
    """
    class ThreadSafeFlag
    --------------------
    """

    state: int
    def __init__(self) -> None:
        """
        Create a new flag which can be used to synchronise a task with code running
        outside the asyncio loop, such as other threads, IRQs, or scheduler
        callbacks. Flags start in the cleared state.
        """

    def ioctl(self, req: int, flags: int) -> Incomplete: ...  # stream-poll hook used internally by the event loop
    def set(self) -> None:
        """
        Set the flag. If there is a task waiting on the flag, it will be scheduled
        to run.
        """
        ...

    def clear(self) -> None:
        """
        Clear the flag. This may be used to ensure that a possibly previously-set
        flag is clear before waiting for it.
        """
        ...

    async def wait(self) -> Generator[Incomplete]:
        """
        Wait for the flag to be set. If the flag is already set then it returns
        immediately. The flag is automatically reset upon return from ``wait``.

        A flag may only be waited on by a single task at a time.

        This is a coroutine.
        """
        ...
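
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the stub above).  It shows the
# pattern the ThreadSafeFlag docstrings describe: an IRQ handler sets the flag
# and a single asyncio task awaits it.  Assumes a MicroPython board that
# provides `machine.Pin`; the pin number and names below are hypothetical.
import asyncio
from machine import Pin

flag = asyncio.ThreadSafeFlag()

def on_button(pin):
    # Safe to call from a hard or soft IRQ: it only marks the flag as set and
    # schedules the waiting task to run.
    flag.set()

button = Pin(0, Pin.IN, Pin.PULL_UP)
button.irq(trigger=Pin.IRQ_FALLING, handler=on_button)

async def main():
    while True:
        await flag.wait()  # returns once set; the flag clears automatically
        print("button pressed")

asyncio.run(main())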
@@ -0,0 +1,9 @@
import sys
import threading
from typing_extensions import Never

_global_lock: threading.Lock

class _LoopBoundMixin:
    if sys.version_info < (3, 11):
        def __init__(self, *, loop: Never = ...) -> None: ...
@@ -0,0 +1,64 @@
|
||||
import sys
|
||||
from collections.abc import Mapping
|
||||
from socket import socket
|
||||
from typing import Any, ClassVar, Literal
|
||||
|
||||
from . import base_events, constants, events, futures, streams, transports
|
||||
|
||||
__all__ = ("BaseProactorEventLoop",)
|
||||
|
||||
class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport):
|
||||
def __init__(
|
||||
self,
|
||||
loop: events.AbstractEventLoop,
|
||||
sock: socket,
|
||||
protocol: streams.StreamReaderProtocol,
|
||||
waiter: futures.Future[Any] | None = None,
|
||||
extra: Mapping[Any, Any] | None = None,
|
||||
server: events.AbstractServer | None = None,
|
||||
) -> None: ...
|
||||
def __del__(self) -> None: ...
|
||||
|
||||
class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport):
|
||||
if sys.version_info >= (3, 10):
|
||||
def __init__(
|
||||
self,
|
||||
loop: events.AbstractEventLoop,
|
||||
sock: socket,
|
||||
protocol: streams.StreamReaderProtocol,
|
||||
waiter: futures.Future[Any] | None = None,
|
||||
extra: Mapping[Any, Any] | None = None,
|
||||
server: events.AbstractServer | None = None,
|
||||
buffer_size: int = 65536,
|
||||
) -> None: ...
|
||||
else:
|
||||
def __init__(
|
||||
self,
|
||||
loop: events.AbstractEventLoop,
|
||||
sock: socket,
|
||||
protocol: streams.StreamReaderProtocol,
|
||||
waiter: futures.Future[Any] | None = None,
|
||||
extra: Mapping[Any, Any] | None = None,
|
||||
server: events.AbstractServer | None = None,
|
||||
) -> None: ...
|
||||
|
||||
class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ...
|
||||
class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ...
|
||||
class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ...
|
||||
|
||||
class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport):
|
||||
_sendfile_compatible: ClassVar[constants._SendfileMode]
|
||||
def __init__(
|
||||
self,
|
||||
loop: events.AbstractEventLoop,
|
||||
sock: socket,
|
||||
protocol: streams.StreamReaderProtocol,
|
||||
waiter: futures.Future[Any] | None = None,
|
||||
extra: Mapping[Any, Any] | None = None,
|
||||
server: events.AbstractServer | None = None,
|
||||
) -> None: ...
|
||||
def _set_extra(self, sock: socket) -> None: ...
|
||||
def can_write_eof(self) -> Literal[True]: ...
|
||||
|
||||
class BaseProactorEventLoop(base_events.BaseEventLoop):
|
||||
def __init__(self, proactor: Any) -> None: ...
|
||||
@@ -0,0 +1,34 @@
from _typeshed import ReadableBuffer
from asyncio import transports
from typing import Any

__all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol")

class BaseProtocol:
    def connection_made(self, transport: transports.BaseTransport) -> None: ...
    def connection_lost(self, exc: Exception | None) -> None: ...
    def pause_writing(self) -> None: ...
    def resume_writing(self) -> None: ...

class Protocol(BaseProtocol):
    def data_received(self, data: bytes) -> None: ...
    def eof_received(self) -> bool | None: ...

class BufferedProtocol(BaseProtocol):
    def get_buffer(self, sizehint: int) -> ReadableBuffer: ...
    def buffer_updated(self, nbytes: int) -> None: ...
    def eof_received(self) -> bool | None: ...

class DatagramProtocol(BaseProtocol):
    def connection_made(self, transport: transports.DatagramTransport) -> None: ...  # type: ignore[override]
    # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK.
    # Use tuple[str | Any, int] to avoid typechecking issues in the most common cases.
    # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted.
    # See https://github.com/python/typing/issues/566
    def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ...
    def error_received(self, exc: Exception) -> None: ...

class SubprocessProtocol(BaseProtocol):
    def pipe_data_received(self, fd: int, data: bytes) -> None: ...
    def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ...
    def process_exited(self) -> None: ...
56
.venv/lib/python3.12/site-packages/stdlib/asyncio/queues.pyi
Normal file
@@ -0,0 +1,56 @@
|
||||
import sys
|
||||
from asyncio.events import AbstractEventLoop
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
from types import GenericAlias
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from .mixins import _LoopBoundMixin
|
||||
else:
|
||||
_LoopBoundMixin = object
|
||||
|
||||
class QueueEmpty(Exception): ...
|
||||
class QueueFull(Exception): ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty", "QueueShutDown")
|
||||
|
||||
else:
|
||||
__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty")
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
class QueueShutDown(Exception): ...
|
||||
|
||||
# If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy.
|
||||
# We can remove the noqa pragma when dropping 3.9 support.
|
||||
class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059
|
||||
if sys.version_info >= (3, 10):
|
||||
def __init__(self, maxsize: int = 0) -> None: ...
|
||||
else:
|
||||
def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
|
||||
def _init(self, maxsize: int) -> None: ...
|
||||
def _get(self) -> _T: ...
|
||||
def _put(self, item: _T) -> None: ...
|
||||
def _format(self) -> str: ...
|
||||
def qsize(self) -> int: ...
|
||||
@property
|
||||
def maxsize(self) -> int: ...
|
||||
def empty(self) -> bool: ...
|
||||
def full(self) -> bool: ...
|
||||
async def put(self, item: _T) -> None: ...
|
||||
def put_nowait(self, item: _T) -> None: ...
|
||||
async def get(self) -> _T: ...
|
||||
def get_nowait(self) -> _T: ...
|
||||
async def join(self) -> None: ...
|
||||
def task_done(self) -> None: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, type: Any, /) -> GenericAlias: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def shutdown(self, immediate: bool = False) -> None: ...
|
||||
|
||||
class PriorityQueue(Queue[_T]): ...
|
||||
class LifoQueue(Queue[_T]): ...
|
||||
@@ -0,0 +1,7 @@
# micropython-stdlib asyncio

This is a hand-edited version of the [asyncio](https://docs.python.org/3/library/asyncio.html) library for MicroPython.
It is a subset of the original library rather than a complete implementation, and is intended for use with MicroPython.

It is packaged as part of the `micropython-stdlib-stubs` type-stub package.
@@ -0,0 +1,32 @@
import sys
from _typeshed import Unused
from collections.abc import Callable, Coroutine
from contextvars import Context  # type: ignore
from typing import Any, TypeVar, final
from typing_extensions import Self

from .events import AbstractEventLoop

if sys.version_info >= (3, 11):
    __all__ = ("Runner", "run")
else:
    __all__ = ("run",)
_T = TypeVar("_T")

if sys.version_info >= (3, 11):
    @final
    class Runner:
        def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ...
        def __enter__(self) -> Self: ...
        def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ...
        def close(self) -> None: ...
        def get_loop(self) -> AbstractEventLoop: ...
        def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ...  # type: ignore

if sys.version_info >= (3, 12):
    def run(
        main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...
    ) -> _T: ...

else:
    def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ...
@@ -0,0 +1,8 @@
import selectors

from . import base_events

__all__ = ("BaseSelectorEventLoop",)

class BaseSelectorEventLoop(base_events.BaseEventLoop):
    def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ...
165
.venv/lib/python3.12/site-packages/stdlib/asyncio/sslproto.pyi
Normal file
@@ -0,0 +1,165 @@
|
||||
import ssl
|
||||
import sys
|
||||
from collections import deque
|
||||
from collections.abc import Callable
|
||||
from enum import Enum
|
||||
from typing import Any, ClassVar, Final, Literal
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from . import constants, events, futures, protocols, transports
|
||||
|
||||
def _create_transport_context(server_side: bool, server_hostname: str | None) -> ssl.SSLContext: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
SSLAgainErrors: tuple[type[ssl.SSLWantReadError], type[ssl.SSLSyscallError]]
|
||||
|
||||
class SSLProtocolState(Enum):
|
||||
UNWRAPPED = "UNWRAPPED"
|
||||
DO_HANDSHAKE = "DO_HANDSHAKE"
|
||||
WRAPPED = "WRAPPED"
|
||||
FLUSHING = "FLUSHING"
|
||||
SHUTDOWN = "SHUTDOWN"
|
||||
|
||||
class AppProtocolState(Enum):
|
||||
STATE_INIT = "STATE_INIT"
|
||||
STATE_CON_MADE = "STATE_CON_MADE"
|
||||
STATE_EOF = "STATE_EOF"
|
||||
STATE_CON_LOST = "STATE_CON_LOST"
|
||||
|
||||
def add_flowcontrol_defaults(high: int | None, low: int | None, kb: int) -> tuple[int, int]: ...
|
||||
|
||||
else:
|
||||
_UNWRAPPED: Final = "UNWRAPPED"
|
||||
_DO_HANDSHAKE: Final = "DO_HANDSHAKE"
|
||||
_WRAPPED: Final = "WRAPPED"
|
||||
_SHUTDOWN: Final = "SHUTDOWN"
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
class _SSLPipe:
|
||||
max_size: ClassVar[int]
|
||||
|
||||
_context: ssl.SSLContext
|
||||
_server_side: bool
|
||||
_server_hostname: str | None
|
||||
_state: str
|
||||
_incoming: ssl.MemoryBIO
|
||||
_outgoing: ssl.MemoryBIO
|
||||
_sslobj: ssl.SSLObject | None
|
||||
_need_ssldata: bool
|
||||
_handshake_cb: Callable[[BaseException | None], None] | None
|
||||
_shutdown_cb: Callable[[], None] | None
|
||||
def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ...
|
||||
@property
|
||||
def context(self) -> ssl.SSLContext: ...
|
||||
@property
|
||||
def ssl_object(self) -> ssl.SSLObject | None: ...
|
||||
@property
|
||||
def need_ssldata(self) -> bool: ...
|
||||
@property
|
||||
def wrapped(self) -> bool: ...
|
||||
def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ...
|
||||
def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ...
|
||||
def feed_eof(self) -> None: ...
|
||||
def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ...
|
||||
def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ...
|
||||
|
||||
class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport):
|
||||
_sendfile_compatible: ClassVar[constants._SendfileMode]
|
||||
|
||||
_loop: events.AbstractEventLoop
|
||||
if sys.version_info >= (3, 11):
|
||||
_ssl_protocol: SSLProtocol | None
|
||||
else:
|
||||
_ssl_protocol: SSLProtocol
|
||||
_closed: bool
|
||||
def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ...
|
||||
def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ...
|
||||
@property
|
||||
def _protocol_paused(self) -> bool: ...
|
||||
def write(self, data: bytes | bytearray | memoryview) -> None: ...
|
||||
def can_write_eof(self) -> Literal[False]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
def get_write_buffer_limits(self) -> tuple[int, int]: ...
|
||||
def get_read_buffer_limits(self) -> tuple[int, int]: ...
|
||||
def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ...
|
||||
def get_read_buffer_size(self) -> int: ...
|
||||
|
||||
def __del__(self) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
_SSLProtocolBase: TypeAlias = protocols.BufferedProtocol
|
||||
else:
|
||||
_SSLProtocolBase: TypeAlias = protocols.Protocol
|
||||
|
||||
class SSLProtocol(_SSLProtocolBase):
|
||||
_server_side: bool
|
||||
_server_hostname: str | None
|
||||
_sslcontext: ssl.SSLContext
|
||||
_extra: dict[str, Any]
|
||||
_write_backlog: deque[tuple[bytes, int]]
|
||||
_write_buffer_size: int
|
||||
_waiter: futures.Future[Any]
|
||||
_loop: events.AbstractEventLoop
|
||||
_app_transport: _SSLProtocolTransport
|
||||
_transport: transports.BaseTransport | None
|
||||
_ssl_handshake_timeout: int | None
|
||||
_app_protocol: protocols.BaseProtocol
|
||||
_app_protocol_is_buffer: bool
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
max_size: ClassVar[int]
|
||||
else:
|
||||
_sslpipe: _SSLPipe | None
|
||||
_session_established: bool
|
||||
_call_connection_made: bool
|
||||
_in_handshake: bool
|
||||
_in_shutdown: bool
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def __init__(
|
||||
self,
|
||||
loop: events.AbstractEventLoop,
|
||||
app_protocol: protocols.BaseProtocol,
|
||||
sslcontext: ssl.SSLContext,
|
||||
waiter: futures.Future[Any],
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
call_connection_made: bool = True,
|
||||
ssl_handshake_timeout: int | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> None: ...
|
||||
else:
|
||||
def __init__(
|
||||
self,
|
||||
loop: events.AbstractEventLoop,
|
||||
app_protocol: protocols.BaseProtocol,
|
||||
sslcontext: ssl.SSLContext,
|
||||
waiter: futures.Future[Any],
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
call_connection_made: bool = True,
|
||||
ssl_handshake_timeout: int | None = None,
|
||||
) -> None: ...
|
||||
|
||||
def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ...
|
||||
def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ...
|
||||
def connection_lost(self, exc: BaseException | None) -> None: ...
|
||||
def eof_received(self) -> None: ...
|
||||
def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ...
|
||||
def _start_shutdown(self) -> None: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
def _write_appdata(self, list_of_data: list[bytes]) -> None: ...
|
||||
else:
|
||||
def _write_appdata(self, data: bytes) -> None: ...
|
||||
|
||||
def _start_handshake(self) -> None: ...
|
||||
def _check_handshake_timeout(self) -> None: ...
|
||||
def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ...
|
||||
def _fatal_error(self, exc: BaseException, message: str = "Fatal error on transport") -> None: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
def _abort(self, exc: BaseException | None) -> None: ...
|
||||
def get_buffer(self, n: int) -> memoryview: ...
|
||||
else:
|
||||
def _abort(self) -> None: ...
|
||||
def _finalize(self) -> None: ...
|
||||
def _process_write_backlog(self) -> None: ...
|
||||
@@ -0,0 +1,10 @@
from collections.abc import Awaitable, Callable, Iterable
from typing import Any

from . import events

__all__ = ("staggered_race",)

async def staggered_race(
    coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None
) -> tuple[Any, int | None, list[Exception | None]]: ...
187
.venv/lib/python3.12/site-packages/stdlib/asyncio/streams.pyi
Normal file
@@ -0,0 +1,187 @@
|
||||
import ssl
|
||||
import sys
|
||||
from _typeshed import ReadableBuffer, StrPath, Incomplete
|
||||
from collections.abc import Awaitable, Callable, Iterable, Sequence, Sized
|
||||
from types import ModuleType
|
||||
from typing import Any, Protocol, SupportsIndex
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
from . import events, protocols, transports
|
||||
from .base_events import Server
|
||||
|
||||
if sys.platform == "win32":
|
||||
__all__ = (
|
||||
"StreamReader",
|
||||
"StreamWriter",
|
||||
"StreamReaderProtocol",
|
||||
"open_connection",
|
||||
"start_server",
|
||||
)
|
||||
else:
|
||||
__all__ = (
|
||||
"StreamReader",
|
||||
"StreamWriter",
|
||||
"StreamReaderProtocol",
|
||||
"open_connection",
|
||||
"start_server",
|
||||
"open_unix_connection",
|
||||
"start_unix_server",
|
||||
)
|
||||
|
||||
_ClientConnectedCallback: TypeAlias = Callable[[StreamReader, StreamWriter], Awaitable[None] | None]
|
||||
|
||||
class _ReaduntilBuffer(ReadableBuffer, Sized, Protocol): ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
async def open_connection(
|
||||
host: str | None = None,
|
||||
port: int | str | None = None,
|
||||
*,
|
||||
limit: int = 65536,
|
||||
ssl_handshake_timeout: float | None = ...,
|
||||
**kwds: Any,
|
||||
) -> tuple[StreamReader, StreamWriter]: ...
|
||||
async def start_server(
|
||||
client_connected_cb: _ClientConnectedCallback,
|
||||
host: str | Sequence[str] | None = None,
|
||||
port: int | str | None = None,
|
||||
backlog: int = 5, # MicroPython backlog argument
|
||||
*,
|
||||
ssl: Incomplete | None = ...,
|
||||
# limit: int = 65536,
|
||||
# ssl_handshake_timeout: float | None = ...,
|
||||
# **kwds: Any,
|
||||
) -> Server: ...
|
||||
|
||||
else:
|
||||
async def open_connection(
|
||||
host: str | None = None,
|
||||
port: int | str | None = None,
|
||||
*,
|
||||
loop: events.AbstractEventLoop | None = None,
|
||||
limit: int = 65536,
|
||||
ssl_handshake_timeout: float | None = ...,
|
||||
**kwds: Any,
|
||||
) -> tuple[StreamReader, StreamWriter]: ...
|
||||
async def start_server(
|
||||
client_connected_cb: _ClientConnectedCallback,
|
||||
host: str | None = None,
|
||||
port: int | str | None = None,
|
||||
*,
|
||||
loop: events.AbstractEventLoop | None = None,
|
||||
limit: int = 65536,
|
||||
ssl_handshake_timeout: float | None = ...,
|
||||
**kwds: Any,
|
||||
) -> Server: ...
|
||||
|
||||
if sys.platform != "win32":
|
||||
if sys.version_info >= (3, 10):
|
||||
async def open_unix_connection(
|
||||
path: StrPath | None = None, *, limit: int = 65536, **kwds: Any
|
||||
) -> tuple[StreamReader, StreamWriter]: ...
|
||||
async def start_unix_server(
|
||||
client_connected_cb: _ClientConnectedCallback,
|
||||
path: StrPath | None = None,
|
||||
*,
|
||||
limit: int = 65536,
|
||||
**kwds: Any,
|
||||
) -> Server: ...
|
||||
else:
|
||||
async def open_unix_connection(
|
||||
path: StrPath | None = None,
|
||||
*,
|
||||
loop: events.AbstractEventLoop | None = None,
|
||||
limit: int = 65536,
|
||||
**kwds: Any,
|
||||
) -> tuple[StreamReader, StreamWriter]: ...
|
||||
async def start_unix_server(
|
||||
client_connected_cb: _ClientConnectedCallback,
|
||||
path: StrPath | None = None,
|
||||
*,
|
||||
loop: events.AbstractEventLoop | None = None,
|
||||
limit: int = 65536,
|
||||
**kwds: Any,
|
||||
) -> Server: ...
|
||||
|
||||
class FlowControlMixin(protocols.Protocol):
|
||||
def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ...
|
||||
|
||||
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
|
||||
def __init__(
|
||||
self,
|
||||
stream_reader: StreamReader,
|
||||
# client_connected_cb: _ClientConnectedCallback | None = None,
|
||||
# loop: events.AbstractEventLoop | None = None,
|
||||
) -> None: ...
|
||||
def __del__(self) -> None: ...
|
||||
|
||||
class StreamWriter:
|
||||
def __init__(
|
||||
self,
|
||||
transport: transports.WriteTransport | Incomplete,
|
||||
protocol: protocols.BaseProtocol | Incomplete,
|
||||
# MicroPython doesn't support reader and loop arguments
|
||||
# reader: StreamReader | None,
|
||||
# loop: events.AbstractEventLoop,
|
||||
) -> None: ...
|
||||
@property
|
||||
def transport(self) -> transports.WriteTransport: ...
|
||||
def write(self, data: bytes | bytearray | memoryview | str) -> None: ...
|
||||
    def awrite(self, data: bytes | bytearray | memoryview | str) -> Awaitable[None]: ...
|
||||
def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ...
|
||||
def write_eof(self) -> None: ...
|
||||
def can_write_eof(self) -> bool: ...
|
||||
def close(self) -> None: ...
|
||||
def is_closing(self) -> bool: ...
|
||||
async def wait_closed(self) -> None: ...
|
||||
def get_extra_info(self, name: str, default: Any = None) -> Any: ...
|
||||
async def drain(self) -> None: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
async def start_tls(
|
||||
self,
|
||||
sslcontext: ssl.SSLContext,
|
||||
*,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
ssl_shutdown_timeout: float | None = None,
|
||||
) -> None: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
async def start_tls(
|
||||
self,
|
||||
sslcontext: ssl.SSLContext,
|
||||
*,
|
||||
server_hostname: str | None = None,
|
||||
ssl_handshake_timeout: float | None = None,
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
def __del__(self, warnings: ModuleType = ...) -> None: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
def __del__(self) -> None: ...
|
||||
|
||||
# StreamReader: TypeAlias = StreamWriter
|
||||
|
||||
class StreamReader:
|
||||
def __init__(
|
||||
self,
|
||||
transport: transports.WriteTransport | Incomplete,
|
||||
protocol: protocols.BaseProtocol | Incomplete = None,
|
||||
# limit: int = 65536,
|
||||
# loop: events.AbstractEventLoop | None = None,
|
||||
) -> None: ...
|
||||
def exception(self) -> Exception: ...
|
||||
def set_exception(self, exc: Exception) -> None: ...
|
||||
def set_transport(self, transport: transports.BaseTransport) -> None: ...
|
||||
def feed_eof(self) -> None: ...
|
||||
def at_eof(self) -> bool: ...
|
||||
def feed_data(self, data: Iterable[SupportsIndex]) -> None: ...
|
||||
async def readline(self) -> bytes: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: ...
|
||||
else:
|
||||
async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: ...
|
||||
|
||||
async def read(self, n: int = -1) -> bytes: ...
|
||||
async def readexactly(self, n: int) -> bytes: ...
|
||||
def __aiter__(self) -> Self: ...
|
||||
async def __anext__(self) -> bytes: ...
|
||||
500
.venv/lib/python3.12/site-packages/stdlib/asyncio/tasks.pyi
Normal file
@@ -0,0 +1,500 @@
|
||||
import concurrent.futures
|
||||
import sys
|
||||
from _asyncio import (
|
||||
Task as Task,
|
||||
_enter_task as _enter_task,
|
||||
_leave_task as _leave_task,
|
||||
_register_task as _register_task,
|
||||
_unregister_task as _unregister_task,
|
||||
)
|
||||
from collections.abc import AsyncIterator, Awaitable, Coroutine, Generator, Iterable, Iterator
|
||||
from typing import Any, Literal, Protocol, TypeVar, overload
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from . import _CoroutineLike
|
||||
from .events import AbstractEventLoop
|
||||
from .futures import Future
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from contextvars import Context
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
__all__ = (
|
||||
"Task",
|
||||
"create_task",
|
||||
"FIRST_COMPLETED",
|
||||
"FIRST_EXCEPTION",
|
||||
"ALL_COMPLETED",
|
||||
"wait",
|
||||
"wait_for",
|
||||
"as_completed",
|
||||
"sleep",
|
||||
"sleep_ms",
|
||||
"gather",
|
||||
"shield",
|
||||
"ensure_future",
|
||||
"run_coroutine_threadsafe",
|
||||
"current_task",
|
||||
"all_tasks",
|
||||
"create_eager_task_factory",
|
||||
"eager_task_factory",
|
||||
"_register_task",
|
||||
"_unregister_task",
|
||||
"_enter_task",
|
||||
"_leave_task",
|
||||
)
|
||||
else:
|
||||
__all__ = (
|
||||
"Task",
|
||||
"create_task",
|
||||
"FIRST_COMPLETED",
|
||||
"FIRST_EXCEPTION",
|
||||
"ALL_COMPLETED",
|
||||
"wait",
|
||||
"wait_for",
|
||||
"as_completed",
|
||||
"sleep",
|
||||
"gather",
|
||||
"shield",
|
||||
"ensure_future",
|
||||
"run_coroutine_threadsafe",
|
||||
"current_task",
|
||||
"all_tasks",
|
||||
"_register_task",
|
||||
"_unregister_task",
|
||||
"_enter_task",
|
||||
"_leave_task",
|
||||
)
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_T_co = TypeVar("_T_co", covariant=True)
|
||||
_T1 = TypeVar("_T1")
|
||||
_T2 = TypeVar("_T2")
|
||||
_T3 = TypeVar("_T3")
|
||||
_T4 = TypeVar("_T4")
|
||||
_T5 = TypeVar("_T5")
|
||||
_T6 = TypeVar("_T6")
|
||||
_FT = TypeVar("_FT", bound=Future[Any])
|
||||
if sys.version_info >= (3, 12):
|
||||
_FutureLike: TypeAlias = Future[_T] | Awaitable[_T]
|
||||
else:
|
||||
_FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T]
|
||||
_TaskYieldType: TypeAlias = Future[object] | None
|
||||
|
||||
FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED
|
||||
FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION
|
||||
ALL_COMPLETED = concurrent.futures.ALL_COMPLETED
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
class _SyncAndAsyncIterator(Iterator[_T_co], AsyncIterator[_T_co], Protocol[_T_co]): ...
|
||||
|
||||
def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: ...
|
||||
|
||||
elif sys.version_info >= (3, 10):
|
||||
def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ...
|
||||
|
||||
else:
|
||||
def as_completed(
|
||||
fs: Iterable[_FutureLike[_T]],
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
timeout: float | None = None,
|
||||
) -> Iterator[Future[_T]]: ...
|
||||
|
||||
@overload
|
||||
def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[overload-overlap]
|
||||
@overload
|
||||
def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ...
|
||||
|
||||
# `gather()` actually returns a list with length equal to the number
|
||||
# of tasks passed; however, Tuple is used similar to the annotation for
|
||||
# zip() because typing does not support variadic type variables. See
|
||||
# typing PR #1550 for discussion.
|
||||
#
|
||||
# N.B. Having overlapping overloads is the only way to get acceptable type inference in all edge cases.
|
||||
if sys.version_info >= (3, 10):
|
||||
@overload
|
||||
def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... # type: ignore[overload-overlap]
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3, _T4]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
coro_or_future6: _FutureLike[_T6],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ...
|
||||
@overload
|
||||
def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ... # type: ignore[overload-overlap]
|
||||
@overload
|
||||
def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...
|
||||
@overload
|
||||
def gather(
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
|
||||
@overload
|
||||
def gather(
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
|
||||
@overload
|
||||
def gather(
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
|
||||
@overload
|
||||
def gather(
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: bool,
|
||||
) -> Future[
|
||||
tuple[
|
||||
_T1 | BaseException,
|
||||
_T2 | BaseException,
|
||||
_T3 | BaseException,
|
||||
_T4 | BaseException,
|
||||
_T5 | BaseException,
|
||||
]
|
||||
]: ...
|
||||
@overload
|
||||
def gather(
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
coro_or_future6: _FutureLike[_T6],
|
||||
/,
|
||||
*,
|
||||
return_exceptions: bool,
|
||||
) -> Future[
|
||||
tuple[
|
||||
_T1 | BaseException,
|
||||
_T2 | BaseException,
|
||||
_T3 | BaseException,
|
||||
_T4 | BaseException,
|
||||
_T5 | BaseException,
|
||||
_T6 | BaseException,
|
||||
]
|
||||
]: ...
|
||||
@overload
|
||||
def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: bool) -> Future[list[_T | BaseException]]: ...
|
||||
|
||||
else:
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3, _T4]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
coro_or_future6: _FutureLike[_T6],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
*coros_or_futures: _FutureLike[_T],
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: Literal[False] = False,
|
||||
) -> Future[list[_T]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: bool,
|
||||
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
|
||||
@overload
|
||||
def gather( # type: ignore[overload-overlap]
|
||||
coro_or_future1: _FutureLike[_T1],
|
||||
coro_or_future2: _FutureLike[_T2],
|
||||
coro_or_future3: _FutureLike[_T3],
|
||||
coro_or_future4: _FutureLike[_T4],
|
||||
coro_or_future5: _FutureLike[_T5],
|
||||
coro_or_future6: _FutureLike[_T6],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: bool,
|
||||
) -> Future[
|
||||
tuple[
|
||||
_T1 | BaseException,
|
||||
_T2 | BaseException,
|
||||
_T3 | BaseException,
|
||||
_T4 | BaseException,
|
||||
_T5 | BaseException,
|
||||
_T6 | BaseException,
|
||||
]
|
||||
]: ...
|
||||
@overload
|
||||
def gather(
|
||||
*coros_or_futures: _FutureLike[_T],
|
||||
loop: AbstractEventLoop | None = None,
|
||||
return_exceptions: bool,
|
||||
) -> Future[list[_T | BaseException]]: ...
|
||||
|
||||
def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
def shield(arg: _FutureLike[_T]) -> Future[_T]: ...
|
||||
@overload
|
||||
async def sleep(delay: float) -> None: ...
|
||||
@overload
|
||||
async def sleep(delay: float, result: _T) -> _T: ...
|
||||
async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ...
|
||||
# MicroPython addition
|
||||
@overload
|
||||
async def sleep_ms(delay: float) -> None: ...
|
||||
@overload
|
||||
async def sleep_ms(delay: int) -> None: ...
|
||||
|
||||
else:
|
||||
def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ...
|
||||
@overload
|
||||
async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
@overload
|
||||
async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ...
|
||||
async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ...
|
||||
# MicroPython addition
|
||||
@overload
|
||||
async def sleep_ms(delay: float) -> None: ...
|
||||
@overload
|
||||
async def sleep_ms(delay: int) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
@overload
|
||||
async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ...
|
||||
@overload
|
||||
async def wait(
|
||||
fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
|
||||
) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
|
||||
|
||||
elif sys.version_info >= (3, 10):
|
||||
@overload
|
||||
async def wait( # type: ignore[overload-overlap]
|
||||
fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED"
|
||||
) -> tuple[set[_FT], set[_FT]]: ...
|
||||
@overload
|
||||
async def wait(
|
||||
fs: Iterable[Awaitable[_T]],
|
||||
*,
|
||||
timeout: float | None = None,
|
||||
return_when: str = "ALL_COMPLETED",
|
||||
) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
|
||||
|
||||
else:
|
||||
@overload
|
||||
async def wait( # type: ignore[overload-overlap]
|
||||
fs: Iterable[_FT],
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
timeout: float | None = None,
|
||||
return_when: str = "ALL_COMPLETED",
|
||||
) -> tuple[set[_FT], set[_FT]]: ...
|
||||
@overload
|
||||
async def wait(
|
||||
fs: Iterable[Awaitable[_T]],
|
||||
*,
|
||||
loop: AbstractEventLoop | None = None,
|
||||
timeout: float | None = None,
|
||||
return_when: str = "ALL_COMPLETED",
|
||||
) -> tuple[set[Task[_T]], set[Task[_T]]]: ...
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
_TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co]
|
||||
elif sys.version_info >= (3, 9):
|
||||
_TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co]
|
||||
else:
|
||||
_TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co]
|
||||
|
||||
def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ...
|
||||
|
||||
else:
|
||||
def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ...
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
from _asyncio import current_task as current_task
|
||||
else:
|
||||
def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ...
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
_TaskT_co = TypeVar("_TaskT_co", bound=Task[Any], covariant=True)
|
||||
|
||||
class _CustomTaskConstructor(Protocol[_TaskT_co]):
|
||||
def __call__(
|
||||
self,
|
||||
coro: _TaskCompatibleCoro[Any],
|
||||
/,
|
||||
*,
|
||||
loop: AbstractEventLoop,
|
||||
name: str | None,
|
||||
context: Context | None,
|
||||
eager_start: bool,
|
||||
) -> _TaskT_co: ...
|
||||
|
||||
class _EagerTaskFactoryType(Protocol[_TaskT_co]):
|
||||
def __call__(
|
||||
self,
|
||||
loop: AbstractEventLoop,
|
||||
coro: _TaskCompatibleCoro[Any],
|
||||
*,
|
||||
name: str | None = None,
|
||||
context: Context | None = None,
|
||||
) -> _TaskT_co: ...
|
||||
|
||||
def create_eager_task_factory(
|
||||
custom_task_constructor: _CustomTaskConstructor[_TaskT_co],
|
||||
) -> _EagerTaskFactoryType[_TaskT_co]: ...
|
||||
def eager_task_factory(
|
||||
loop: AbstractEventLoop | None,
|
||||
coro: _TaskCompatibleCoro[_T_co],
|
||||
*,
|
||||
name: str | None = None,
|
||||
context: Context | None = None,
|
||||
) -> Task[_T_co]: ...
|
||||
@@ -0,0 +1,9 @@
|
||||
from collections.abc import Callable
|
||||
from typing import TypeVar
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
__all__ = ("to_thread",)
|
||||
_P = ParamSpec("_P")
|
||||
_R = TypeVar("_R")
|
||||
|
||||
async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
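# Illustrative usage sketch (not part of the stub): to_thread runs a blocking
# callable in a worker thread so the event loop stays responsive; time.sleep
# stands in for arbitrary blocking work.
# >>> import asyncio, time
# >>> async def main():
# ...     await asyncio.to_thread(time.sleep, 1)
# >>> asyncio.run(main())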
|
||||
@@ -0,0 +1,19 @@
|
||||
from types import TracebackType
|
||||
from typing import final
|
||||
from typing_extensions import Self
|
||||
|
||||
__all__ = ("Timeout", "timeout", "timeout_at")
|
||||
|
||||
@final
|
||||
class Timeout:
|
||||
def __init__(self, when: float | None) -> None: ...
|
||||
def when(self) -> float | None: ...
|
||||
def reschedule(self, when: float | None) -> None: ...
|
||||
def expired(self) -> bool: ...
|
||||
async def __aenter__(self) -> Self: ...
|
||||
async def __aexit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
||||
) -> None: ...
|
||||
|
||||
def timeout(delay: float | None) -> Timeout: ...
|
||||
def timeout_at(when: float | None) -> Timeout: ...
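# Illustrative usage sketch (not part of the stub): timeout() returns an async
# context manager that cancels the enclosed work and raises TimeoutError once
# the delay expires; the delays below are arbitrary.
# >>> import asyncio
# >>> async def main():
# ...     try:
# ...         async with asyncio.timeout(0.5):
# ...             await asyncio.sleep(10)
# ...     except TimeoutError:
# ...         pass   # the sleep was cancelled after ~0.5 s
# >>> asyncio.run(main())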
|
||||
@@ -0,0 +1,47 @@
|
||||
from asyncio.events import AbstractEventLoop
|
||||
from asyncio.protocols import BaseProtocol
|
||||
from collections.abc import Iterable, Mapping
|
||||
from socket import _Address
|
||||
from typing import Any
|
||||
|
||||
__all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport")
|
||||
|
||||
class BaseTransport:
|
||||
def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ...
|
||||
def get_extra_info(self, name: str, default: Any = None) -> Any: ...
|
||||
def is_closing(self) -> bool: ...
|
||||
def close(self) -> None: ...
|
||||
def set_protocol(self, protocol: BaseProtocol) -> None: ...
|
||||
def get_protocol(self) -> BaseProtocol: ...
|
||||
|
||||
class ReadTransport(BaseTransport):
|
||||
def is_reading(self) -> bool: ...
|
||||
def pause_reading(self) -> None: ...
|
||||
def resume_reading(self) -> None: ...
|
||||
|
||||
class WriteTransport(BaseTransport):
|
||||
def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ...
|
||||
def get_write_buffer_size(self) -> int: ...
|
||||
def get_write_buffer_limits(self) -> tuple[int, int]: ...
|
||||
def write(self, data: bytes | bytearray | memoryview) -> None: ...
|
||||
def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ...
|
||||
def write_eof(self) -> None: ...
|
||||
def can_write_eof(self) -> bool: ...
|
||||
def abort(self) -> None: ...
|
||||
|
||||
class Transport(ReadTransport, WriteTransport): ...
|
||||
|
||||
class DatagramTransport(BaseTransport):
|
||||
def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ...
|
||||
def abort(self) -> None: ...
|
||||
|
||||
class SubprocessTransport(BaseTransport):
|
||||
def get_pid(self) -> int: ...
|
||||
def get_returncode(self) -> int | None: ...
|
||||
def get_pipe_transport(self, fd: int) -> BaseTransport | None: ...
|
||||
def send_signal(self, signal: int) -> None: ...
|
||||
def terminate(self) -> None: ...
|
||||
def kill(self) -> None: ...
|
||||
|
||||
class _FlowControlMixin(Transport):
|
||||
def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ...
|
||||
92
.venv/lib/python3.12/site-packages/stdlib/asyncio/trsock.pyi
Normal file
@@ -0,0 +1,92 @@
|
||||
import socket
|
||||
import sys
|
||||
from _typeshed import ReadableBuffer
|
||||
from builtins import type as Type # alias to avoid name clashes with property named "type"
|
||||
from collections.abc import Iterable
|
||||
from types import TracebackType
|
||||
from typing import Any, BinaryIO, NoReturn, overload
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
# These are based on socket; maybe move them out into _typeshed.pyi or similar
|
||||
_Address: TypeAlias = socket._Address
|
||||
_RetAddress: TypeAlias = Any
|
||||
_WriteBuffer: TypeAlias = bytearray | memoryview
|
||||
_CMSG: TypeAlias = tuple[int, int, bytes]
|
||||
|
||||
class TransportSocket:
|
||||
def __init__(self, sock: socket.socket) -> None: ...
|
||||
@property
|
||||
def family(self) -> int: ...
|
||||
@property
|
||||
def type(self) -> int: ...
|
||||
@property
|
||||
def proto(self) -> int: ...
|
||||
def __getstate__(self) -> NoReturn: ...
|
||||
def fileno(self) -> int: ...
|
||||
def dup(self) -> socket.socket: ...
|
||||
def get_inheritable(self) -> bool: ...
|
||||
def shutdown(self, how: int) -> None: ...
|
||||
@overload
|
||||
def getsockopt(self, level: int, optname: int) -> int: ...
|
||||
@overload
|
||||
def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ...
|
||||
@overload
|
||||
def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ...
|
||||
@overload
|
||||
def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ...
|
||||
def getpeername(self) -> _RetAddress: ...
|
||||
def getsockname(self) -> _RetAddress: ...
|
||||
def getsockbyname(self) -> NoReturn: ... # This method doesn't exist on socket, yet is passed through?
|
||||
def settimeout(self, value: float | None) -> None: ...
|
||||
def gettimeout(self) -> float | None: ...
|
||||
def setblocking(self, flag: bool) -> None: ...
|
||||
if sys.version_info < (3, 11):
|
||||
def _na(self, what: str) -> None: ...
|
||||
def accept(self) -> tuple[socket.socket, _RetAddress]: ...
|
||||
def connect(self, address: _Address) -> None: ...
|
||||
def connect_ex(self, address: _Address) -> int: ...
|
||||
def bind(self, address: _Address) -> None: ...
|
||||
if sys.platform == "win32":
|
||||
def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ...
|
||||
else:
|
||||
def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> NoReturn: ...
|
||||
|
||||
def listen(self, backlog: int = ..., /) -> None: ...
|
||||
def makefile(self) -> BinaryIO: ...
|
||||
def sendfile(self, file: BinaryIO, offset: int = ..., count: int | None = ...) -> int: ...
|
||||
def close(self) -> None: ...
|
||||
def detach(self) -> int: ...
|
||||
if sys.platform == "linux":
|
||||
def sendmsg_afalg(
|
||||
self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ...
|
||||
) -> int: ...
|
||||
else:
|
||||
def sendmsg_afalg(
|
||||
self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ...
|
||||
) -> NoReturn: ...
|
||||
|
||||
def sendmsg(
|
||||
self, buffers: Iterable[ReadableBuffer], ancdata: Iterable[_CMSG] = ..., flags: int = ..., address: _Address = ..., /
|
||||
) -> int: ...
|
||||
@overload
|
||||
def sendto(self, data: ReadableBuffer, address: _Address) -> int: ...
|
||||
@overload
|
||||
def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ...
|
||||
def send(self, data: ReadableBuffer, flags: int = ...) -> int: ...
|
||||
def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ...
|
||||
def set_inheritable(self, inheritable: bool) -> None: ...
|
||||
if sys.platform == "win32":
|
||||
def share(self, process_id: int) -> bytes: ...
|
||||
else:
|
||||
def share(self, process_id: int) -> NoReturn: ...
|
||||
|
||||
def recv_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> int: ...
|
||||
def recvfrom_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> tuple[int, _RetAddress]: ...
|
||||
def recvmsg_into(
|
||||
self, buffers: Iterable[_WriteBuffer], ancbufsize: int = ..., flags: int = ..., /
|
||||
) -> tuple[int, list[_CMSG], int, Any]: ...
|
||||
def recvmsg(self, bufsize: int, ancbufsize: int = ..., flags: int = ..., /) -> tuple[bytes, list[_CMSG], int, Any]: ...
|
||||
def recvfrom(self, bufsize: int, flags: int = ...) -> tuple[bytes, _RetAddress]: ...
|
||||
def recv(self, bufsize: int, flags: int = ...) -> bytes: ...
|
||||
def __enter__(self) -> socket.socket: ...
|
||||
def __exit__(self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None) -> None: ...
|
||||
2074
.venv/lib/python3.12/site-packages/stdlib/builtins.pyi
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,588 @@
|
||||
"""
|
||||
Collection and container types.
|
||||
|
||||
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/collections.html
|
||||
|
||||
CPython module: :mod:`python:collections` https://docs.python.org/3/library/collections.html .
|
||||
|
||||
This module implements advanced collection and container types to
|
||||
hold/accumulate various objects.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
import sys
|
||||
from _collections_abc import dict_items, dict_keys, dict_values
|
||||
from _typeshed import Incomplete, SupportsItems, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT
|
||||
from typing import Dict, Any, Generic, NoReturn, SupportsIndex, TypeVar, final, overload
|
||||
from typing_extensions import Awaitable, TypeAlias, TypeVar, Self
|
||||
from collections.abc import Iterable
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
from types import GenericAlias
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from collections.abc import (
|
||||
Callable,
|
||||
ItemsView,
|
||||
Iterable,
|
||||
Iterator,
|
||||
KeysView,
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
MutableSequence,
|
||||
Sequence,
|
||||
ValuesView,
|
||||
)
|
||||
else:
|
||||
from _collections_abc import *
|
||||
|
||||
__all__ = ["OrderedDict", "defaultdict", "deque", "namedtuple"]
|
||||
|
||||
_S = TypeVar("_S")
|
||||
_T = TypeVar("_T")
|
||||
_T1 = TypeVar("_T1")
|
||||
_T2 = TypeVar("_T2")
|
||||
_KT = TypeVar("_KT")
|
||||
_VT = TypeVar("_VT")
|
||||
_KT_co = TypeVar("_KT_co", covariant=True)
|
||||
_VT_co = TypeVar("_VT_co", covariant=True)
|
||||
|
||||
# namedtuple is special-cased in the type checker; the initializer is ignored.
|
||||
def namedtuple(name: str, fields: str | Iterable[str]) -> type[tuple[Any, ...]]:
|
||||
"""
|
||||
This is a factory function that creates a new namedtuple type with a specific
name and set of fields. A namedtuple is a subclass of tuple which allows
its fields to be accessed not just by numeric index, but also with an
attribute access syntax using symbolic field names. Fields is a sequence of
strings specifying field names. For compatibility with CPython it can also be
a string with space-separated field names (but this is less efficient).
|
||||
Example of use::
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
MyTuple = namedtuple("MyTuple", ("id", "name"))
|
||||
t1 = MyTuple(1, "foo")
|
||||
t2 = MyTuple(2, "bar")
|
||||
print(t1.name)
|
||||
assert t2.name == t2[1]
|
||||
"""
|
||||
...
|
||||
|
||||
class UserDict(MutableMapping[_KT, _VT]): # type: ignore
|
||||
data: dict[_KT, _VT] # type: ignore
|
||||
# __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics
|
||||
@overload
|
||||
def __init__(self, dict: None = None, /) -> None: ...
|
||||
@overload
|
||||
def __init__(
|
||||
self: UserDict[str, _VT], dict: None = None, /, **kwargs: _VT # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(self, dict: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... # type: ignore
|
||||
@overload
|
||||
def __init__(
|
||||
self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
dict: SupportsKeysAndGetItem[str, _VT],
|
||||
/,
|
||||
**kwargs: _VT, # type: ignore
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(self, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ... # type: ignore
|
||||
@overload
|
||||
def __init__(
|
||||
self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
iterable: Iterable[tuple[str, _VT]],
|
||||
/,
|
||||
**kwargs: _VT, # type: ignore
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(self: UserDict[str, str], iterable: Iterable[list[str]], /) -> None: ...
|
||||
@overload
|
||||
def __init__(self: UserDict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __getitem__(self, key: _KT) -> _VT: ... # type: ignore
|
||||
def __setitem__(self, key: _KT, item: _VT) -> None: ... # type: ignore
|
||||
def __delitem__(self, key: _KT) -> None: ...
|
||||
def __iter__(self) -> Iterator[_KT]: ... # type: ignore
|
||||
def __contains__(self, key: object) -> bool: ...
|
||||
def copy(self) -> Self: ...
|
||||
def __copy__(self) -> Self: ...
|
||||
|
||||
# `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`.
|
||||
# TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system.
|
||||
# See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.
|
||||
@classmethod
|
||||
@overload
|
||||
def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> UserDict[_T, Any | None]: ...
|
||||
@classmethod
|
||||
@overload
|
||||
def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
@overload
|
||||
def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...
|
||||
@overload
|
||||
def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ...
|
||||
# UserDict.__ior__ should be kept roughly in line with MutableMapping.update()
|
||||
@overload # type: ignore[misc]
|
||||
def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... # type: ignore
|
||||
if sys.version_info >= (3, 12):
|
||||
@overload
|
||||
def get(self, key: _KT, default: None = None) -> _VT | None: ... # type: ignore
|
||||
@overload
|
||||
def get(self, key: _KT, default: _T) -> _VT | _T: ... # type: ignore
|
||||
|
||||
class UserList(MutableSequence[_T]):
|
||||
data: list[_T]
|
||||
@overload
|
||||
def __init__(self, initlist: None = None) -> None: ...
|
||||
@overload
|
||||
def __init__(self, initlist: Iterable[_T]) -> None: ...
|
||||
def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ...
|
||||
def __le__(self, other: list[_T] | UserList[_T]) -> bool: ...
|
||||
def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ...
|
||||
def __ge__(self, other: list[_T] | UserList[_T]) -> bool: ...
|
||||
def __eq__(self, other: object) -> bool: ...
|
||||
def __contains__(self, item: object) -> bool: ...
|
||||
def __len__(self) -> int: ...
|
||||
@overload
|
||||
def __getitem__(self, i: SupportsIndex) -> _T: ... # type: ignore
|
||||
@overload
|
||||
def __getitem__(self, i: slice) -> Self: ...
|
||||
@overload
|
||||
def __setitem__(self, i: SupportsIndex, item: _T) -> None: ...
|
||||
@overload
|
||||
def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ...
|
||||
def __delitem__(self, i: SupportsIndex | slice) -> None: ...
|
||||
def __add__(self, other: Iterable[_T]) -> Self: ...
|
||||
def __radd__(self, other: Iterable[_T]) -> Self: ...
|
||||
def __iadd__(self, other: Iterable[_T]) -> Self: ...
|
||||
def __mul__(self, n: int) -> Self: ...
|
||||
def __rmul__(self, n: int) -> Self: ...
|
||||
def __imul__(self, n: int) -> Self: ...
|
||||
def append(self, item: _T) -> None: ...
|
||||
def insert(self, i: int, item: _T) -> None: ...
|
||||
def pop(self, i: int = -1) -> _T: ... # type: ignore
|
||||
def remove(self, item: _T) -> None: ...
|
||||
def copy(self) -> Self: ...
|
||||
def __copy__(self) -> Self: ...
|
||||
def count(self, item: _T) -> int: ...
|
||||
# The runtime signature is "item, *args", and the arguments are then passed
|
||||
# to `list.index`. In order to give more precise types, we pretend that the
|
||||
# `item` argument is positional-only.
|
||||
def index(self, item: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... # type: ignore
|
||||
# All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`.
|
||||
@overload
|
||||
def sort(self: UserList[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ...
|
||||
@overload
|
||||
def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ...
|
||||
def extend(self, other: Iterable[_T]) -> None: ...
|
||||
|
||||
class UserString(Sequence[UserString]):
|
||||
data: str
|
||||
def __init__(self, seq: object) -> None: ...
|
||||
def __int__(self) -> int: ...
|
||||
def __float__(self) -> float: ...
|
||||
def __complex__(self) -> complex: ...
|
||||
def __getnewargs__(self) -> tuple[str]: ...
|
||||
def __lt__(self, string: str | UserString) -> bool: ...
|
||||
def __le__(self, string: str | UserString) -> bool: ...
|
||||
def __gt__(self, string: str | UserString) -> bool: ...
|
||||
def __ge__(self, string: str | UserString) -> bool: ...
|
||||
def __eq__(self, string: object) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __contains__(self, char: object) -> bool: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __getitem__(self, index: SupportsIndex | slice) -> Self: ...
|
||||
def __iter__(self) -> Iterator[Self]: ...
|
||||
def __reversed__(self) -> Iterator[Self]: ...
|
||||
def __add__(self, other: object) -> Self: ...
|
||||
def __radd__(self, other: object) -> Self: ...
|
||||
def __mul__(self, n: int) -> Self: ...
|
||||
def __rmul__(self, n: int) -> Self: ...
|
||||
def __mod__(self, args: Any) -> Self: ...
|
||||
def __rmod__(self, template: object) -> Self: ...
|
||||
def capitalize(self) -> Self: ...
|
||||
def casefold(self) -> Self: ...
|
||||
def center(self, width: int, *args: Any) -> Self: ...
|
||||
def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
|
||||
def encode(self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict") -> bytes: ...
|
||||
def endswith(self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ...
|
||||
def expandtabs(self, tabsize: int = 8) -> Self: ...
|
||||
def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
|
||||
def format(self, *args: Any, **kwds: Any) -> str: ...
|
||||
def format_map(self, mapping: Mapping[str, Any]) -> str: ...
|
||||
def index(self, sub: str, start: int = 0, end: int = sys.maxsize) -> int: ...
|
||||
def isalpha(self) -> bool: ...
|
||||
def isalnum(self) -> bool: ...
|
||||
def isdecimal(self) -> bool: ...
|
||||
def isdigit(self) -> bool: ...
|
||||
def isidentifier(self) -> bool: ...
|
||||
def islower(self) -> bool: ...
|
||||
def isnumeric(self) -> bool: ...
|
||||
def isprintable(self) -> bool: ...
|
||||
def isspace(self) -> bool: ...
|
||||
def istitle(self) -> bool: ...
|
||||
def isupper(self) -> bool: ...
|
||||
def isascii(self) -> bool: ...
|
||||
def join(self, seq: Iterable[str]) -> str: ...
|
||||
def ljust(self, width: int, *args: Any) -> Self: ...
|
||||
def lower(self) -> Self: ...
|
||||
def lstrip(self, chars: str | None = None) -> Self: ...
|
||||
maketrans = str.maketrans
|
||||
def partition(self, sep: str) -> tuple[str, str, str]: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def removeprefix(self, prefix: str | UserString, /) -> Self: ...
|
||||
def removesuffix(self, suffix: str | UserString, /) -> Self: ...
|
||||
|
||||
def replace(self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ...
|
||||
def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
|
||||
def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
|
||||
def rjust(self, width: int, *args: Any) -> Self: ...
|
||||
def rpartition(self, sep: str) -> tuple[str, str, str]: ...
|
||||
def rstrip(self, chars: str | None = None) -> Self: ...
|
||||
def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ...
|
||||
def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ...
|
||||
def splitlines(self, keepends: bool = False) -> list[str]: ...
|
||||
def startswith(self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ...
|
||||
def strip(self, chars: str | None = None) -> Self: ...
|
||||
def swapcase(self) -> Self: ...
|
||||
def title(self) -> Self: ...
|
||||
def translate(self, *args: Any) -> Self: ...
|
||||
def upper(self) -> Self: ...
|
||||
def zfill(self, width: int) -> Self: ...
|
||||
|
||||
class deque:
|
||||
"""
|
||||
Minimal implementation of a deque that implements a FIFO buffer.
|
||||
"""
|
||||
|
||||
@property
|
||||
def maxlen(self) -> int | None: ...
|
||||
@overload
|
||||
def __init__(self, *, maxlen: int | None = None) -> None: ...
|
||||
@overload
|
||||
def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ...
|
||||
def append(self, x: _T, /) -> None: # type: ignore
|
||||
"""
|
||||
Add *x* to the right side of the deque.
|
||||
Raises ``IndexError`` if overflow checking is enabled and there is
|
||||
no more room in the queue.
|
||||
"""
|
||||
...
|
||||
|
||||
def appendleft(self, x: _T, /) -> None: # type: ignore
|
||||
"""
|
||||
Add *x* to the left side of the deque.
|
||||
Raises ``IndexError`` if overflow checking is enabled and there is
|
||||
no more room in the queue.
|
||||
"""
|
||||
...
|
||||
|
||||
def copy(self) -> Self: ...
|
||||
def count(self, x: _T, /) -> int: ... # type: ignore
|
||||
def extend(self, iterable: Iterable[_T], /) -> None:
|
||||
"""
|
||||
Extend the deque by appending all the items from *iterable* to
|
||||
the right of the deque.
|
||||
Raises ``IndexError`` if overflow checking is enabled and there is
|
||||
no more room in the deque.
|
||||
"""
|
||||
...
|
||||
|
||||
def extendleft(self, iterable: Iterable[_T], /) -> None: ...
|
||||
def insert(self, i: int, x: _T, /) -> None: ... # type: ignore
|
||||
def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ... # type: ignore
|
||||
def pop(self) -> _T: # type: ignore
|
||||
"""
|
||||
Remove and return an item from the right side of the deque.
|
||||
Raises ``IndexError`` if no items are present.
|
||||
"""
|
||||
...
|
||||
|
||||
def popleft(self) -> _T: # type: ignore
|
||||
"""
|
||||
Remove and return an item from the left side of the deque.
|
||||
Raises ``IndexError`` if no items are present.
|
||||
"""
|
||||
...
|
||||
|
||||
def remove(self, value: _T, /) -> None: ... # type: ignore
|
||||
def rotate(self, n: int = 1, /) -> None: ...
|
||||
def __copy__(self) -> Self: ...
|
||||
def __len__(self) -> int: ...
|
||||
# These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores
|
||||
def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override] # type: ignore
|
||||
def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override] # type: ignore
|
||||
def __delitem__(self, key: SupportsIndex, /) -> None: ... # type: ignore[override]
|
||||
def __contains__(self, key: object, /) -> bool: ...
|
||||
def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... # type: ignore
|
||||
def __iadd__(self, value: Iterable[_T], /) -> Self: ...
|
||||
def __add__(self, value: Self, /) -> Self: ...
|
||||
def __mul__(self, value: int, /) -> Self: ...
|
||||
def __imul__(self, value: int, /) -> Self: ...
|
||||
def __lt__(self, value: deque[_T], /) -> bool: ... # type: ignore
|
||||
def __le__(self, value: deque[_T], /) -> bool: ... # type: ignore
|
||||
def __gt__(self, value: deque[_T], /) -> bool: ... # type: ignore
|
||||
def __ge__(self, value: deque[_T], /) -> bool: ... # type: ignore
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
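# Illustrative usage sketch (not part of the stub): this deque acts as a
# bounded FIFO buffer; the maxlen of 3 is an arbitrary example bound.
# >>> from collections import deque
# >>> d = deque([], 3)
# >>> d.append(1)
# >>> d.append(2)
# >>> d.popleft()
# 1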
|
||||
|
||||
class Counter(dict[_T, int], Generic[_T]):
|
||||
@overload
|
||||
def __init__(self, iterable: None = None, /) -> None: ...
|
||||
@overload
|
||||
def __init__(self: Counter[str], iterable: None = None, /, **kwargs: int) -> None: ...
|
||||
@overload
|
||||
def __init__(self, mapping: SupportsKeysAndGetItem[_T, int], /) -> None: ...
|
||||
@overload
|
||||
def __init__(self, iterable: Iterable[_T], /) -> None: ...
|
||||
def copy(self) -> Self: ...
|
||||
def elements(self) -> Iterator[_T]: ... # type: ignore
|
||||
def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ...
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override]
|
||||
@overload
|
||||
def subtract(self, iterable: None = None, /) -> None: ...
|
||||
@overload
|
||||
def subtract(self, mapping: Mapping[_T, int], /) -> None: ...
|
||||
@overload
|
||||
def subtract(self, iterable: Iterable[_T], /) -> None: ...
|
||||
# Unlike dict.update(), use Mapping instead of SupportsKeysAndGetItem for the first overload
|
||||
# (source code does an `isinstance(other, Mapping)` check)
|
||||
#
|
||||
# The second overload is also deliberately different to dict.update()
|
||||
# (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`,
|
||||
# the tuples would be added as keys, breaking type safety)
|
||||
@overload # type: ignore[override]
|
||||
def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: ...
|
||||
@overload
|
||||
def update(self, iterable: Iterable[_T], /, **kwargs: int) -> None: ...
|
||||
@overload
|
||||
def update(self, iterable: None = None, /, **kwargs: int) -> None: ...
|
||||
def __missing__(self, key: _T) -> int: ...
|
||||
def __delitem__(self, elem: object) -> None: ...
|
||||
if sys.version_info >= (3, 10):
|
||||
def __eq__(self, other: object) -> bool: ...
|
||||
def __ne__(self, other: object) -> bool: ...
|
||||
|
||||
def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ...
|
||||
def __sub__(self, other: Counter[_T]) -> Counter[_T]: ...
|
||||
def __and__(self, other: Counter[_T]) -> Counter[_T]: ...
|
||||
def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override]
|
||||
def __pos__(self) -> Counter[_T]: ...
|
||||
def __neg__(self) -> Counter[_T]: ...
|
||||
# several type: ignores because __iadd__ is supposedly incompatible with __add__, etc.
|
||||
def __iadd__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[misc]
|
||||
def __isub__(self, other: SupportsItems[_T, int]) -> Self: ...
|
||||
def __iand__(self, other: SupportsItems[_T, int]) -> Self: ...
|
||||
def __ior__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[override,misc]
|
||||
if sys.version_info >= (3, 10):
|
||||
def total(self) -> int: ...
|
||||
def __le__(self, other: Counter[Any]) -> bool: ...
|
||||
def __lt__(self, other: Counter[Any]) -> bool: ...
|
||||
def __ge__(self, other: Counter[Any]) -> bool: ...
|
||||
def __gt__(self, other: Counter[Any]) -> bool: ...
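# Illustrative usage sketch (not part of the stub; note that Counter is not
# listed in this module's __all__, so it may not exist on every target):
# >>> from collections import Counter
# >>> c = Counter("abracadabra")
# >>> c.most_common(2)
# [('a', 5), ('b', 2)]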
|
||||
|
||||
# The pure-Python implementations of the "views" classes
|
||||
# These are exposed at runtime in `collections/__init__.py`
|
||||
class _OrderedDictKeysView(KeysView[_KT_co]):
|
||||
def __reversed__(self) -> Iterator[_KT_co]: ...
|
||||
|
||||
class _OrderedDictItemsView(ItemsView[_KT_co, _VT_co]):
|
||||
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
|
||||
|
||||
class _OrderedDictValuesView(ValuesView[_VT_co]):
|
||||
def __reversed__(self) -> Iterator[_VT_co]: ...
|
||||
|
||||
# The C implementations of the "views" classes
|
||||
# (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`,
|
||||
# but they are not exposed anywhere)
|
||||
# pyright doesn't have a specific error code for subclassing error!
|
||||
@final
|
||||
class _odict_keys(dict_keys[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
|
||||
def __reversed__(self) -> Iterator[_KT_co]: ...
|
||||
|
||||
@final
|
||||
class _odict_items(dict_items[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
|
||||
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
|
||||
|
||||
@final
|
||||
class _odict_values(dict_values[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
|
||||
def __reversed__(self) -> Iterator[_VT_co]: ...
|
||||
|
||||
class OrderedDict(Dict[_KT, _VT], Generic[_KT, _VT]): # type: ignore
|
||||
"""
|
||||
``dict`` type subclass which remembers and preserves the order of keys
added. When an ordered dict is iterated over, keys/items are returned in
the order they were added::
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
# To benefit from ordered keys, OrderedDict should be initialized
# from a sequence of (key, value) pairs.
|
||||
d = OrderedDict([("z", 1), ("a", 2)])
|
||||
# More items can be added as usual
|
||||
d["w"] = 5
|
||||
d["b"] = 3
|
||||
for k, v in d.items():
|
||||
print(k, v)
|
||||
|
||||
Output::
|
||||
|
||||
z 1
|
||||
a 2
|
||||
w 5
|
||||
b 3
|
||||
"""
|
||||
|
||||
def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... # type: ignore
|
||||
def move_to_end(self, key: _KT, last: bool = True) -> None: ... # type: ignore
|
||||
def copy(self) -> Self: ...
|
||||
def __reversed__(self) -> Iterator[_KT]: ... # type: ignore
|
||||
def keys(self) -> _odict_keys[_KT, _VT]: ... # type: ignore
|
||||
def items(self) -> _odict_items[_KT, _VT]: ... # type: ignore
|
||||
def values(self) -> _odict_values[_KT, _VT]: ... # type: ignore
|
||||
# The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences.
|
||||
# Like dict.fromkeys, its true signature is not expressible in the current type system.
|
||||
# See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.
|
||||
@classmethod
|
||||
@overload
|
||||
def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... # type: ignore
|
||||
@classmethod
|
||||
@overload
|
||||
def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... # type: ignore
|
||||
# Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences.
|
||||
@overload
|
||||
def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... # type: ignore
|
||||
@overload
|
||||
def setdefault(self, key: _KT, default: _VT) -> _VT: ... # type: ignore
|
||||
# Same as dict.pop, but accepts keyword arguments
|
||||
@overload
|
||||
def pop(self, key: _KT) -> _VT: ... # type: ignore
|
||||
@overload
|
||||
def pop(self, key: _KT, default: _VT) -> _VT: ... # type: ignore
|
||||
@overload
|
||||
def pop(self, key: _KT, default: _T) -> _VT | _T: ... # type: ignore
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
@overload
|
||||
def __or__(self, value: dict[_KT, _VT], /) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore
|
||||
@overload
|
||||
def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] # type: ignore
|
||||
|
||||
class defaultdict(dict[_KT, _VT]): # type: ignore
|
||||
default_factory: Callable[[], _VT] | None
|
||||
@overload
|
||||
def __init__(self) -> None: ...
|
||||
@overload
|
||||
def __init__(self: defaultdict[str, _VT], **kwargs: _VT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
@overload
|
||||
def __init__(self, default_factory: Callable[[], _VT] | None, /) -> None: ...
|
||||
@overload
|
||||
def __init__(
|
||||
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
default_factory: Callable[[], _VT] | None,
|
||||
/,
|
||||
**kwargs: _VT, # type: ignore
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(self, default_factory: Callable[[], _VT] | None, map: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... # type: ignore
|
||||
@overload
|
||||
def __init__(
|
||||
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
default_factory: Callable[[], _VT] | None,
|
||||
map: SupportsKeysAndGetItem[str, _VT],
|
||||
/,
|
||||
**kwargs: _VT, # type: ignore
|
||||
) -> None: ...
|
||||
@overload
|
||||
def __init__(self, default_factory: Callable[[], _VT] | None, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ... # type: ignore
|
||||
@overload
|
||||
def __init__(
|
||||
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
|
||||
default_factory: Callable[[], _VT] | None,
|
||||
iterable: Iterable[tuple[str, _VT]],
|
||||
/,
|
||||
**kwargs: _VT, # type: ignore
|
||||
) -> None: ...
|
||||
def __missing__(self, key: _KT, /) -> _VT: ... # type: ignore
|
||||
def __copy__(self) -> Self: ...
|
||||
def copy(self) -> Self: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
@overload
|
||||
def __or__(self, value: dict[_KT, _VT], /) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ...
|
||||
@overload
|
||||
def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc]
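# Illustrative usage sketch (not part of the stub): default_factory supplies a
# value for missing keys; list is used here as an arbitrary factory.
# >>> from collections import defaultdict
# >>> d = defaultdict(list)
# >>> d["missing"].append(1)
# >>> d["missing"]
# [1]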
|
||||
|
||||
class ChainMap(MutableMapping[_KT, _VT]): # type: ignore
|
||||
maps: list[MutableMapping[_KT, _VT]] # type: ignore
|
||||
def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... # type: ignore
|
||||
def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... # type: ignore
|
||||
@property
|
||||
def parents(self) -> Self: ...
|
||||
def __setitem__(self, key: _KT, value: _VT) -> None: ... # type: ignore
|
||||
def __delitem__(self, key: _KT) -> None: ...
|
||||
def __getitem__(self, key: _KT) -> _VT: ... # type: ignore
|
||||
def __iter__(self) -> Iterator[_KT]: ... # type: ignore
|
||||
def __len__(self) -> int: ...
|
||||
def __contains__(self, key: object) -> bool: ...
|
||||
@overload
|
||||
def get(self, key: _KT, default: None = None) -> _VT | None: ... # type: ignore
|
||||
@overload
|
||||
def get(self, key: _KT, default: _T) -> _VT | _T: ... # type: ignore
|
||||
def __missing__(self, key: _KT) -> _VT: ... # undocumented # type: ignore
|
||||
def __bool__(self) -> bool: ...
|
||||
# Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences.
|
||||
@overload
|
||||
def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... # type: ignore
|
||||
@overload
|
||||
def setdefault(self, key: _KT, default: _VT) -> _VT: ... # type: ignore
|
||||
@overload
|
||||
def pop(self, key: _KT) -> _VT: ... # type: ignore
|
||||
@overload
|
||||
def pop(self, key: _KT, default: _VT) -> _VT: ... # type: ignore
|
||||
@overload
|
||||
def pop(self, key: _KT, default: _T) -> _VT | _T: ... # type: ignore
|
||||
def copy(self) -> Self: ...
|
||||
__copy__ = copy
|
||||
# All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime,
|
||||
# so the signature should be kept in line with `dict.fromkeys`.
|
||||
@classmethod
|
||||
@overload
|
||||
def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ...
|
||||
@classmethod
|
||||
@overload
|
||||
# Special-case None: the user probably wants to add non-None values later.
|
||||
def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: ...
|
||||
@classmethod
|
||||
@overload
|
||||
def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
@overload
|
||||
def __or__(self, other: Mapping[_KT, _VT]) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ...
|
||||
@overload
|
||||
def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ...
|
||||
# ChainMap.__ior__ should be kept roughly in line with MutableMapping.update()
|
||||
@overload # type: ignore[misc]
|
||||
def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... # type: ignore
|
||||
@overload
|
||||
def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... # type: ignore
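# Illustrative usage sketch (not part of the stub; ChainMap is not listed in
# this module's __all__, so it may not exist on every target): lookups search
# the chained maps left to right.
# >>> from collections import ChainMap
# >>> defaults = {"colour": "red", "user": "guest"}
# >>> overrides = {"user": "admin"}
# >>> cm = ChainMap(overrides, defaults)
# >>> (cm["user"], cm["colour"])
# ('admin', 'red')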
|
||||
@@ -0,0 +1,2 @@
|
||||
from _collections_abc import *
|
||||
from _collections_abc import __all__ as __all__
|
||||
339
.venv/lib/python3.12/site-packages/stdlib/enum.pyi
Normal file
@@ -0,0 +1,339 @@
|
||||
import _typeshed
|
||||
import sys
|
||||
import types
|
||||
from _typeshed import SupportsKeysAndGetItem, Unused
|
||||
from builtins import property as _builtins_property
|
||||
from collections.abc import Callable, Iterable, Iterator, Mapping
|
||||
from typing import Any, Generic, Literal, TypeVar, overload
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
__all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
__all__ += [
|
||||
"CONFORM",
|
||||
"CONTINUOUS",
|
||||
"EJECT",
|
||||
"EnumCheck",
|
||||
"EnumType",
|
||||
"FlagBoundary",
|
||||
"KEEP",
|
||||
"NAMED_FLAGS",
|
||||
"ReprEnum",
|
||||
"STRICT",
|
||||
"StrEnum",
|
||||
"UNIQUE",
|
||||
"global_enum",
|
||||
"global_enum_repr",
|
||||
"global_flag_repr",
|
||||
"global_str",
|
||||
"member",
|
||||
"nonmember",
|
||||
"property",
|
||||
"verify",
|
||||
"pickle_by_enum_name",
|
||||
"pickle_by_global_name",
|
||||
]
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
__all__ += ["EnumDict"]
|
||||
|
||||
_EnumMemberT = TypeVar("_EnumMemberT")
|
||||
_EnumerationT = TypeVar("_EnumerationT", bound=type[Enum])
|
||||
|
||||
# The following all work:
|
||||
# >>> from enum import Enum
|
||||
# >>> from string import ascii_lowercase
|
||||
# >>> Enum('Foo', names='RED YELLOW GREEN')
|
||||
# <enum 'Foo'>
|
||||
# >>> Enum('Foo', names=[('RED', 1), ('YELLOW', 2)])
|
||||
# <enum 'Foo'>
|
||||
# >>> Enum('Foo', names=((x for x in (ascii_lowercase[i], i)) for i in range(5)))
|
||||
# <enum 'Foo'>
|
||||
# >>> Enum('Foo', names={'RED': 1, 'YELLOW': 2})
|
||||
# <enum 'Foo'>
|
||||
_EnumNames: TypeAlias = str | Iterable[str] | Iterable[Iterable[str | Any]] | Mapping[str, Any]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class nonmember(Generic[_EnumMemberT]):
|
||||
value: _EnumMemberT
|
||||
def __init__(self, value: _EnumMemberT) -> None: ...
|
||||
|
||||
class member(Generic[_EnumMemberT]):
|
||||
value: _EnumMemberT
|
||||
def __init__(self, value: _EnumMemberT) -> None: ...
|
||||
|
||||
class _EnumDict(dict[str, Any]):
|
||||
def __init__(self) -> None: ...
|
||||
def __setitem__(self, key: str, value: Any) -> None: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
# See comment above `typing.MutableMapping.update`
|
||||
# for why overloads are preferable to a Union here
|
||||
#
|
||||
# Unlike with MutableMapping.update(), the first argument is required,
|
||||
# hence the type: ignore
|
||||
@overload # type: ignore[override]
|
||||
def update(self, members: SupportsKeysAndGetItem[str, Any], **more_members: Any) -> None: ...
|
||||
@overload
|
||||
def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
@property
|
||||
def member_names(self) -> list[str]: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
EnumDict = _EnumDict
|
||||
|
||||
# Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself
|
||||
class EnumMeta(type):
|
||||
if sys.version_info >= (3, 11):
|
||||
def __new__(
|
||||
metacls: type[_typeshed.Self],
|
||||
cls: str,
|
||||
bases: tuple[type, ...],
|
||||
classdict: _EnumDict,
|
||||
*,
|
||||
boundary: FlagBoundary | None = None,
|
||||
_simple: bool = False,
|
||||
**kwds: Any,
|
||||
) -> _typeshed.Self: ...
|
||||
elif sys.version_info >= (3, 9):
|
||||
def __new__(
|
||||
metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any
|
||||
) -> _typeshed.Self: ...
|
||||
else:
|
||||
def __new__(metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> _typeshed.Self: ...
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
@classmethod
|
||||
def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override]
|
||||
else:
|
||||
@classmethod
|
||||
def __prepare__(metacls, cls: str, bases: tuple[type, ...]) -> _EnumDict: ... # type: ignore[override]
|
||||
|
||||
def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ...
|
||||
def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
def __contains__(self: type[Any], value: object) -> bool: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
def __contains__(self: type[Any], member: object) -> bool: ...
|
||||
elif sys.version_info >= (3, 10):
|
||||
def __contains__(self: type[Any], obj: object) -> bool: ...
|
||||
else:
|
||||
def __contains__(self: type[Any], member: object) -> bool: ...
|
||||
|
||||
def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ...
|
||||
@_builtins_property
|
||||
def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __bool__(self) -> Literal[True]: ...
|
||||
def __dir__(self) -> list[str]: ...
|
||||
|
||||
# Overload 1: Value lookup on an already existing enum class (simple case)
|
||||
@overload
|
||||
def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ...
|
||||
|
||||
# Overload 2: Functional API for constructing new enum classes.
|
||||
if sys.version_info >= (3, 11):
|
||||
@overload
|
||||
def __call__(
|
||||
cls,
|
||||
value: str,
|
||||
names: _EnumNames,
|
||||
*,
|
||||
module: str | None = None,
|
||||
qualname: str | None = None,
|
||||
type: type | None = None,
|
||||
start: int = 1,
|
||||
boundary: FlagBoundary | None = None,
|
||||
) -> type[Enum]: ...
|
||||
else:
|
||||
@overload
|
||||
def __call__(
|
||||
cls,
|
||||
value: str,
|
||||
names: _EnumNames,
|
||||
*,
|
||||
module: str | None = None,
|
||||
qualname: str | None = None,
|
||||
type: type | None = None,
|
||||
start: int = 1,
|
||||
) -> type[Enum]: ...
|
||||
|
||||
# Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case)
|
||||
#
|
||||
# >>> class Foo(enum.Enum):
|
||||
# ... X = 1, 2, 3
|
||||
# >>> Foo(1, 2, 3)
|
||||
# <Foo.X: (1, 2, 3)>
|
||||
#
|
||||
if sys.version_info >= (3, 12):
|
||||
@overload
|
||||
def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: ...
|
||||
|
||||
_member_names_: list[str] # undocumented
|
||||
_member_map_: dict[str, Enum] # undocumented
|
||||
_value2member_map_: dict[Any, Enum] # undocumented
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
# In 3.11 the `EnumMeta` metaclass was renamed to `EnumType`, but the old name also exists.
|
||||
EnumType = EnumMeta
|
||||
|
||||
class property(types.DynamicClassAttribute):
|
||||
def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ...
|
||||
name: str
|
||||
clsname: str
|
||||
member: Enum | None
|
||||
|
||||
_magic_enum_attr = property
|
||||
else:
|
||||
_magic_enum_attr = types.DynamicClassAttribute
|
||||
|
||||
class Enum(metaclass=EnumMeta):
|
||||
@_magic_enum_attr
|
||||
def name(self) -> str: ...
|
||||
@_magic_enum_attr
|
||||
def value(self) -> Any: ...
|
||||
_name_: str
|
||||
_value_: Any
|
||||
_ignore_: str | list[str]
|
||||
_order_: str
|
||||
__order__: str
|
||||
@classmethod
|
||||
def _missing_(cls, value: object) -> Any: ...
|
||||
@staticmethod
|
||||
def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ...
|
||||
# It's not true that `__new__` will accept any argument type,
|
||||
# so ideally we'd use `Any` to indicate that the argument type is inexpressible.
|
||||
# However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr`
|
||||
# (see #7752, #2539, mypy/#5788),
|
||||
# and in practice using `object` here has the same effect as using `Any`.
|
||||
def __new__(cls, value: object) -> Self: ...
|
||||
def __dir__(self) -> list[str]: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __format__(self, format_spec: str) -> str: ...
|
||||
def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
def __copy__(self) -> Self: ...
|
||||
def __deepcopy__(self, memo: Any) -> Self: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
@classmethod
|
||||
def __signature__(cls) -> str: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class ReprEnum(Enum): ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
_IntEnumBase = ReprEnum
|
||||
else:
|
||||
_IntEnumBase = Enum
|
||||
|
||||
class IntEnum(int, _IntEnumBase):
|
||||
_value_: int
|
||||
@_magic_enum_attr
|
||||
def value(self) -> int: ...
|
||||
def __new__(cls, value: int) -> Self: ...
|
||||
|
||||
def unique(enumeration: _EnumerationT) -> _EnumerationT: ...
|
||||
|
||||
_auto_null: Any
|
||||
|
||||
class Flag(Enum):
|
||||
_name_: str | None # type: ignore[assignment]
|
||||
_value_: int
|
||||
@_magic_enum_attr
|
||||
def name(self) -> str | None: ... # type: ignore[override]
|
||||
@_magic_enum_attr
|
||||
def value(self) -> int: ...
|
||||
def __contains__(self, other: Self) -> bool: ...
|
||||
def __bool__(self) -> bool: ...
|
||||
def __or__(self, other: Self) -> Self: ...
|
||||
def __and__(self, other: Self) -> Self: ...
|
||||
def __xor__(self, other: Self) -> Self: ...
|
||||
def __invert__(self) -> Self: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
def __iter__(self) -> Iterator[Self]: ...
|
||||
def __len__(self) -> int: ...
|
||||
__ror__ = __or__
|
||||
__rand__ = __and__
|
||||
__rxor__ = __xor__
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class StrEnum(str, ReprEnum):
|
||||
def __new__(cls, value: str) -> Self: ...
|
||||
_value_: str
|
||||
@_magic_enum_attr
|
||||
def value(self) -> str: ...
|
||||
@staticmethod
|
||||
def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: ...
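# Illustrative usage sketch (not part of the stub): StrEnum members are also
# str instances, and auto() uses the lower-cased member name as the value;
# the Colour example class is hypothetical.
# >>> from enum import StrEnum, auto
# >>> class Colour(StrEnum):
# ...     RED = auto()
# >>> Colour.RED == "red"
# True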
|
||||
|
||||
class EnumCheck(StrEnum):
|
||||
CONTINUOUS = "no skipped integer values"
|
||||
NAMED_FLAGS = "multi-flag aliases may not contain unnamed flags"
|
||||
UNIQUE = "one name per value"
|
||||
|
||||
CONTINUOUS = EnumCheck.CONTINUOUS
|
||||
NAMED_FLAGS = EnumCheck.NAMED_FLAGS
|
||||
UNIQUE = EnumCheck.UNIQUE
|
||||
|
||||
class verify:
|
||||
def __init__(self, *checks: EnumCheck) -> None: ...
|
||||
def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ...
|
||||
|
||||
class FlagBoundary(StrEnum):
|
||||
STRICT = "strict"
|
||||
CONFORM = "conform"
|
||||
EJECT = "eject"
|
||||
KEEP = "keep"
|
||||
|
||||
STRICT = FlagBoundary.STRICT
|
||||
CONFORM = FlagBoundary.CONFORM
|
||||
EJECT = FlagBoundary.EJECT
|
||||
KEEP = FlagBoundary.KEEP
|
||||
|
||||
def global_str(self: Enum) -> str: ...
|
||||
def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ...
|
||||
def global_enum_repr(self: Enum) -> str: ...
|
||||
def global_flag_repr(self: Flag) -> str: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
# The body of the class is the same, but the base classes are different.
|
||||
class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases
|
||||
def __new__(cls, value: int) -> Self: ...
|
||||
def __or__(self, other: int) -> Self: ...
|
||||
def __and__(self, other: int) -> Self: ...
|
||||
def __xor__(self, other: int) -> Self: ...
|
||||
__ror__ = __or__
|
||||
__rand__ = __and__
|
||||
__rxor__ = __xor__
|
||||
|
||||
else:
|
||||
class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases
|
||||
def __new__(cls, value: int) -> Self: ...
|
||||
def __or__(self, other: int) -> Self: ...
|
||||
def __and__(self, other: int) -> Self: ...
|
||||
def __xor__(self, other: int) -> Self: ...
|
||||
__ror__ = __or__
|
||||
__rand__ = __and__
|
||||
__rxor__ = __xor__
|
||||
|
||||
class auto:
|
||||
_value_: Any
|
||||
@_magic_enum_attr
|
||||
def value(self) -> Any: ...
|
||||
def __new__(cls) -> Self: ...
|
||||
|
||||
# These don't exist, but auto is basically immediately replaced with
|
||||
# either an int or a str depending on the type of the enum. StrEnum's auto
|
||||
# shouldn't have these, but they're needed for int versions of auto (mostly the __or__).
|
||||
# Ideally type checkers would special case auto enough to handle this,
|
||||
# but until then this is a slightly inaccurate helping hand.
|
||||
def __or__(self, other: int | Self) -> Self: ...
|
||||
def __and__(self, other: int | Self) -> Self: ...
|
||||
def __xor__(self, other: int | Self) -> Self: ...
|
||||
__ror__ = __or__
|
||||
__rand__ = __and__
|
||||
__rxor__ = __xor__
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def pickle_by_global_name(self: Enum, proto: int) -> str: ...
|
||||
def pickle_by_enum_name(self: _EnumMemberT, proto: int) -> tuple[Callable[..., Any], tuple[type[_EnumMemberT], str]]: ...
|
||||
999
.venv/lib/python3.12/site-packages/stdlib/io.pyi
Normal file
@@ -0,0 +1,999 @@
|
||||
"""
|
||||
Input/output streams.
|
||||
|
||||
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/io.html
|
||||
|
||||
CPython module: :mod:`python:io` https://docs.python.org/3/library/io.html .
|
||||
|
||||
This module contains additional types of `stream` (file-like) objects
|
||||
and helper functions.
|
||||
|
||||
Conceptual hierarchy
|
||||
--------------------
|
||||
|
||||
Admonition: Difference to CPython
|
||||
:class: attention
|
||||
|
||||
Conceptual hierarchy of stream base classes is simplified in MicroPython,
|
||||
as described in this section.
|
||||
|
||||
(Abstract) base stream classes, which serve as a foundation for the behaviour
of all the concrete classes, adhere to a few dichotomies (pair-wise
classifications) in CPython. In MicroPython, they are somewhat simplified
and made implicit to achieve higher efficiency and save resources.
|
||||
|
||||
An important dichotomy in CPython is unbuffered vs buffered streams. In
|
||||
MicroPython, all streams are currently unbuffered. This is because all
|
||||
modern OSes, and even many RTOSes and filesystem drivers already perform
|
||||
buffering on their side. Adding another layer of buffering is counter-
|
||||
productive (an issue known as "bufferbloat") and takes precious memory.
|
||||
Note that there are still cases where buffering may be useful, so we may
introduce optional buffering support at a later time.
|
||||
|
||||
But in CPython, another important dichotomy is tied to "bufferedness" -
whether a stream may incur short reads/writes or not. A short read is when
a user asks for e.g. 10 bytes from a stream but gets fewer; similarly
for writes. In CPython, unbuffered streams are automatically susceptible to
short operations, while buffered streams guarantee against them. The absence
of short reads/writes is an important trait, as it allows more concise and
efficient programs to be written - something which is highly desirable
for MicroPython. So, while MicroPython doesn't support buffered streams,
|
||||
it still provides for no-short-operations streams. Whether there will
|
||||
be short operations or not depends on each particular class' needs, but
|
||||
developers are strongly advised to favour no-short-operations behaviour
|
||||
for the reasons stated above. For example, MicroPython sockets are
|
||||
guaranteed to avoid short read/writes. Actually, at this time, there is
|
||||
no example of a short-operations stream class in the core, and one would
|
||||
be a port-specific class, where such a need is governed by hardware
|
||||
peculiarities.
|
||||
|
||||
The no-short-operations behaviour gets tricky in case of non-blocking
|
||||
streams, blocking vs non-blocking behaviour being another CPython dichotomy,
|
||||
fully supported by MicroPython. Non-blocking streams never wait for
|
||||
data either to arrive or be written - they read/write whatever possible,
|
||||
or signal lack of data (or ability to write data). Clearly, this conflicts
|
||||
with "no-short-operations" policy, and indeed, a case of non-blocking
|
||||
buffered (and this no-short-ops) streams is convoluted in CPython - in
|
||||
some places, such combination is prohibited, in some it's undefined or
|
||||
just not documented, in some cases it raises verbose exceptions. The
|
||||
matter is much simpler in MicroPython: non-blocking stream are important
|
||||
for efficient asynchronous operations, so this property prevails on
|
||||
the "no-short-ops" one. So, while blocking streams will avoid short
|
||||
reads/writes whenever possible (the only case to get a short read is
|
||||
if end of file is reached, or in case of error (but errors don't
|
||||
return short data, but raise exceptions)), non-blocking streams may
|
||||
produce short data to avoid blocking the operation.
|
||||
|
||||
The final dichotomy is binary vs text streams. MicroPython of course
|
||||
supports these, but while in CPython text streams are inherently
|
||||
buffered, they aren't in MicroPython. (Indeed, that's one of the cases
|
||||
for which we may introduce buffering support.)
|
||||
|
||||
Note that for efficiency, MicroPython doesn't provide abstract base
|
||||
classes corresponding to the hierarchy above, and it's not possible
|
||||
to implement, or subclass, a stream class in pure Python.
|
||||
"""

from __future__ import annotations

import abc
import sys
from _io import (
    DEFAULT_BUFFER_SIZE as DEFAULT_BUFFER_SIZE,
    BlockingIOError as BlockingIOError,
    BufferedRandom as BufferedRandom,
    BufferedReader as BufferedReader,
    BufferedRWPair as BufferedRWPair,
    BufferedWriter as BufferedWriter,
    BytesIO as BytesIO,
    FileIO as FileIO,
    IncrementalNewlineDecoder as IncrementalNewlineDecoder,
    StringIO as StringIO,
    TextIOWrapper as TextIOWrapper,
    _BufferedIOBase,
    _IOBase,
    _RawIOBase,
    _TextIOBase,
    _WrappedBuffer as _WrappedBuffer,  # used elsewhere in typeshed
    open as open,
    open_code as open_code,
)
from typing import Final, overload
from _mpy_shed import AnyReadableBuf, AnyWritableBuf, FileIO, IOBase_mp, PathLike, TextIOWrapper
from _mpy_shed.io_modes import _OpenBinaryMode, _OpenTextModeWriting
from _typeshed import Incomplete
from array import array
from typing_extensions import Awaitable, TypeAlias, TypeVar

__all__ = [
    "BlockingIOError",
    "open",
    "open_code",
    "IOBase",
    "RawIOBase",
    "FileIO",
    "BytesIO",
    "StringIO",
    "BufferedIOBase",
    "BufferedReader",
    "BufferedWriter",
    "BufferedRWPair",
    "BufferedRandom",
    "TextIOBase",
    "TextIOWrapper",
    "UnsupportedOperation",
    "SEEK_SET",
    "SEEK_CUR",
    "SEEK_END",
]

if sys.version_info >= (3, 11):
    from _io import text_encoding as text_encoding

    __all__ += ["DEFAULT_BUFFER_SIZE", "IncrementalNewlineDecoder", "text_encoding"]

# Seek constants, exported via __all__ above; values match CPython's io module.
SEEK_SET: Final = 0
SEEK_CUR: Final = 1
SEEK_END: Final = 2

_T = TypeVar("_T")
AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True)
StrOrBytesPath = TypeVar("StrOrBytesPath", str, bytes, PathLike[str], PathLike[bytes])
_OpenFile = TypeVar("_OpenFile", str, bytes, PathLike[str], PathLike[bytes], int)
AnyReadableBuf = TypeVar("AnyReadableBuf", bytearray, array, memoryview, bytes)
AnyWritableBuf = TypeVar("AnyWritableBuf", bytearray, array, memoryview)
_Self = TypeVar("_Self")

class UnsupportedOperation(OSError, ValueError): ...
class IOBase(_IOBase, metaclass=abc.ABCMeta): ...
class RawIOBase(_RawIOBase, IOBase): ...
class BufferedIOBase(_BufferedIOBase, IOBase): ...
class TextIOBase(_TextIOBase, IOBase): ...

class StringIO(IOBase_mp):
    """
    In-memory text stream (a str wrapper).
    """

    @overload
    def __init__(self, string: str = "", /) -> None:
        """
        In-memory file-like object for input/output.
        `StringIO` is used for text-mode I/O (similar to a normal file opened
        with the "t" modifier). Initial contents can be specified with the
        *string* parameter.
        """

    @overload
    def __init__(self, alloc_size: int, /) -> None:
        """
        Create an empty `StringIO` object, pre-allocated to hold up to
        *alloc_size* bytes. That means that writing that amount of bytes won't
        lead to reallocation of the buffer, and thus won't hit an
        out-of-memory situation or lead to memory fragmentation. This
        constructor is recommended for use only in special cases and in
        system-level libraries, not for end-user applications.

        .. admonition:: Difference to CPython
           :class: attention

           This constructor is a MicroPython extension.
        """

class BytesIO(IOBase_mp):
    """
    In-memory binary stream (a bytes wrapper).
    """

    @overload
    def __init__(self, string: bytes = b"", /) -> None:
        """
        In-memory file-like object for binary-mode input/output (similar to a
        normal file opened with the "b" modifier). Initial contents can be
        specified with the *string* parameter (a bytes object). All the usual
        file methods like ``read()``, ``write()``, ``seek()``, ``flush()`` and
        ``close()`` are available on these objects.
        """

    @overload
    def __init__(self, alloc_size: int, /) -> None:
        """
        Create an empty `BytesIO` object, pre-allocated to hold up to
        *alloc_size* bytes. That means that writing that amount of bytes won't
        lead to reallocation of the buffer, and thus won't hit an
        out-of-memory situation or lead to memory fragmentation. This
        constructor is recommended for use only in special cases and in
        system-level libraries, not for end-user applications.

        .. admonition:: Difference to CPython
           :class: attention

           This constructor is a MicroPython extension.
        """

@overload
def open(name: _OpenFile, /, **kwargs) -> TextIOWrapper:
    """
    Open a file. The builtin ``open()`` function is aliased to this function.
    All ports (which provide access to a file system) are required to support
    the *mode* parameter, but support for other arguments varies by port.
    """

@overload
def open(name: _OpenFile, mode: _OpenTextModeWriting = ..., /, **kwargs) -> TextIOWrapper:
    """
    Open a file. The builtin ``open()`` function is aliased to this function.
    All ports (which provide access to a file system) are required to support
    the *mode* parameter, but support for other arguments varies by port.
    """

@overload
def open(name: _OpenFile, mode: _OpenBinaryMode = ..., /, **kwargs) -> FileIO:
    """
    Open a file. The builtin ``open()`` function is aliased to this function.
    All ports (which provide access to a file system) are required to support
    the *mode* parameter, but support for other arguments varies by port.
    """
321
.venv/lib/python3.12/site-packages/stdlib/json/__init__.pyi
Normal file
321
.venv/lib/python3.12/site-packages/stdlib/json/__init__.pyi
Normal file
@@ -0,0 +1,321 @@
"""
JSON encoding and decoding.

MicroPython module: https://docs.micropython.org/en/v1.26.0/library/json.html

CPython module: :mod:`python:json` https://docs.python.org/3/library/json.html .

This module allows conversion between Python objects and the JSON
data format.
"""

from __future__ import annotations

from _typeshed import Incomplete, SupportsRead, SupportsWrite
from typing import AnyStr, Tuple, overload, Any

from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder
from .encoder import JSONEncoder as JSONEncoder
from _mpy_shed import IOBase_mp
from typing_extensions import Awaitable, TypeAlias, TypeVar

__all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", "JSONEncoder"]

@overload
def dumps(obj: Any) -> str:
    """
    Return *obj* represented as a JSON string.

    The arguments have the same meaning as in `dump`.
    """
    ...

@overload
def dumps(obj: Any, separators: Tuple[str, str]) -> str:
    """
    Return *obj* represented as a JSON string.

    The arguments have the same meaning as in `dump`.
    """
    ...

@overload
def dump(obj: Any, stream: IOBase_mp | Incomplete, /) -> None:
    """
    Serialise *obj* to a JSON string, writing it to the given *stream*.

    If specified, separators should be an ``(item_separator, key_separator)``
    tuple. The default is ``(', ', ': ')``. To get the most compact JSON
    representation, you should specify ``(',', ':')`` to eliminate whitespace.
    """
    ...

@overload
def dump(obj: Any, stream: IOBase_mp | Incomplete, separators: Tuple[str, str], /) -> None:
    """
    Serialise *obj* to a JSON string, writing it to the given *stream*.

    If specified, separators should be an ``(item_separator, key_separator)``
    tuple. The default is ``(', ', ': ')``. To get the most compact JSON
    representation, you should specify ``(',', ':')`` to eliminate whitespace.
    """
    ...
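
# Usage sketch for dumps()/dump() following the signatures above; the
# *separators* argument controls compactness (illustrative only):
#
#     import json
#
#     obj = {"t": 25.1, "ok": True}
#     json.dumps(obj)                    # e.g. '{"t": 25.1, "ok": true}'
#     json.dumps(obj, (",", ":"))        # most compact form, no whitespace
#
#     with open("cfg.json", "w") as f:   # file name is illustrative
#         json.dump(obj, f, (",", ":"))  # write compact JSON to a stream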
def loads(str: AnyStr) -> Any:
    """
    Parse the JSON *str* and return an object. Raises :exc:`ValueError` if the
    string is not correctly formed.
    """
    ...

def load(stream: IOBase_mp | Incomplete) -> Any:
    """
    Parse the given *stream*, interpreting it as a JSON string and
    deserialising the data to a Python object. The resulting object is
    returned.

    Parsing continues until end-of-file is encountered.
    A :exc:`ValueError` is raised if the data in *stream* is not correctly formed.
    """
    ...

def detect_encoding(b: bytes | bytearray) -> str: ...  # undocumented
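
# Usage sketch for loads()/load() (illustrative only):
#
#     import json
#
#     cfg = json.loads('{"led": 2, "on": true}')   # -> {'led': 2, 'on': True}
#
#     with open("cfg.json") as f:                  # file name is illustrative
#         cfg = json.load(f)                       # parse directly from a stream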
1556
.venv/lib/python3.12/site-packages/stdlib/os/__init__.pyi
Normal file
1556
.venv/lib/python3.12/site-packages/stdlib/os/__init__.pyi
Normal file
File diff suppressed because it is too large
308
.venv/lib/python3.12/site-packages/stdlib/re.pyi
Normal file
308
.venv/lib/python3.12/site-packages/stdlib/re.pyi
Normal file
@@ -0,0 +1,308 @@
import enum
import sre_compile
import sre_constants
import sys
from _typeshed import MaybeNone, ReadableBuffer
from collections.abc import Callable, Iterator, Mapping
from typing import Any, AnyStr, Generic, Literal, TypeVar, final, overload
from typing_extensions import TypeAlias

if sys.version_info >= (3, 9):
    from types import GenericAlias

__all__ = [
    "match",
    "fullmatch",
    "search",
    "sub",
    "subn",
    "split",
    "findall",
    "finditer",
    "compile",
    "purge",
    "escape",
    "error",
    "A",
    "I",
    "L",
    "M",
    "S",
    "X",
    "U",
    "ASCII",
    "IGNORECASE",
    "LOCALE",
    "MULTILINE",
    "DOTALL",
    "VERBOSE",
    "UNICODE",
    "Match",
    "Pattern",
]
if sys.version_info < (3, 13):
    __all__ += ["template"]

if sys.version_info >= (3, 11):
    __all__ += ["NOFLAG", "RegexFlag"]

if sys.version_info >= (3, 13):
    __all__ += ["PatternError"]

    PatternError = sre_constants.error

_T = TypeVar("_T")

# The implementation defines this in re._constants (version_info >= 3, 11) or
# sre_constants. Typeshed has it here because its __module__ attribute is set to "re".
class error(Exception):
    msg: str
    pattern: str | bytes | None
    pos: int | None
    lineno: int
    colno: int
    def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ...

@final
class Match(Generic[AnyStr]):
    @property
    def pos(self) -> int: ...
    @property
    def endpos(self) -> int: ...
    @property
    def lastindex(self) -> int | None: ...
    @property
    def lastgroup(self) -> str | None: ...
    @property
    def string(self) -> AnyStr: ...

    # The regular expression object whose match() or search() method produced
    # this match instance.
    @property
    def re(self) -> Pattern[AnyStr]: ...
    @overload
    def expand(self: Match[str], template: str) -> str: ...
    @overload
    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...
    @overload
    def expand(self, template: AnyStr) -> AnyStr: ...
    # group() returns "AnyStr" or "AnyStr | None", depending on the pattern.
    @overload
    def group(self, group: Literal[0] = 0, /) -> AnyStr: ...
    @overload
    def group(self, group: str | int, /) -> AnyStr | MaybeNone: ...
    @overload
    def group(self, group1: str | int, group2: str | int, /, *groups: str | int) -> tuple[AnyStr | MaybeNone, ...]: ...
    # Each item of groups()'s return tuple is either "AnyStr" or
    # "AnyStr | None", depending on the pattern.
    @overload
    def groups(self) -> tuple[AnyStr | MaybeNone, ...]: ...
    @overload
    def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ...
    # Each value in groupdict()'s return dict is either "AnyStr" or
    # "AnyStr | None", depending on the pattern.
    @overload
    def groupdict(self) -> dict[str, AnyStr | MaybeNone]: ...
    @overload
    def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ...
    def start(self, group: int | str = 0, /) -> int: ...
    def end(self, group: int | str = 0, /) -> int: ...
    def span(self, group: int | str = 0, /) -> tuple[int, int]: ...
    @property
    def regs(self) -> tuple[tuple[int, int], ...]: ...  # undocumented
    # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern.
    @overload
    def __getitem__(self, key: Literal[0], /) -> AnyStr: ...
    @overload
    def __getitem__(self, key: int | str, /) -> AnyStr | MaybeNone: ...
    def __copy__(self) -> Match[AnyStr]: ...
    def __deepcopy__(self, memo: Any, /) -> Match[AnyStr]: ...
    if sys.version_info >= (3, 9):
        def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
|
||||
@final
|
||||
class Pattern(Generic[AnyStr]):
|
||||
@property
|
||||
def flags(self) -> int: ...
|
||||
@property
|
||||
def groupindex(self) -> Mapping[str, int]: ...
|
||||
@property
|
||||
def groups(self) -> int: ...
|
||||
@property
|
||||
def pattern(self) -> AnyStr: ...
|
||||
@overload
|
||||
def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
|
||||
@overload
|
||||
def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
|
||||
@overload
|
||||
def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
|
||||
@overload
|
||||
def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
|
||||
@overload
|
||||
def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
|
||||
@overload
|
||||
def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
|
||||
@overload
|
||||
def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
|
||||
@overload
|
||||
def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
|
||||
@overload
|
||||
def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
|
||||
@overload
|
||||
def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: ...
|
||||
@overload
|
||||
def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | MaybeNone]: ...
|
||||
@overload
|
||||
def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | MaybeNone]: ...
|
||||
# return type depends on the number of groups in the pattern
|
||||
@overload
|
||||
def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ...
|
||||
@overload
|
||||
def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ...
|
||||
@overload
|
||||
def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ...
|
||||
@overload
|
||||
def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ...
|
||||
@overload
|
||||
def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ...
|
||||
@overload
|
||||
def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ...
|
||||
@overload
|
||||
def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ...
|
||||
@overload
|
||||
def sub(
|
||||
self: Pattern[bytes],
|
||||
repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
|
||||
string: ReadableBuffer,
|
||||
count: int = 0,
|
||||
) -> bytes: ...
|
||||
@overload
|
||||
def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ...
|
||||
@overload
|
||||
def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ...
|
||||
@overload
|
||||
def subn(
|
||||
self: Pattern[bytes],
|
||||
repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
|
||||
string: ReadableBuffer,
|
||||
count: int = 0,
|
||||
) -> tuple[bytes, int]: ...
|
||||
@overload
|
||||
def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ...
|
||||
def __copy__(self) -> Pattern[AnyStr]: ...
|
||||
def __deepcopy__(self, memo: Any, /) -> Pattern[AnyStr]: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
|
||||
# ----- re variables and constants -----
|
||||
|
||||
class RegexFlag(enum.IntFlag):
|
||||
A = sre_compile.SRE_FLAG_ASCII
|
||||
ASCII = A
|
||||
DEBUG = sre_compile.SRE_FLAG_DEBUG
|
||||
I = sre_compile.SRE_FLAG_IGNORECASE
|
||||
IGNORECASE = I
|
||||
L = sre_compile.SRE_FLAG_LOCALE
|
||||
LOCALE = L
|
||||
M = sre_compile.SRE_FLAG_MULTILINE
|
||||
MULTILINE = M
|
||||
S = sre_compile.SRE_FLAG_DOTALL
|
||||
DOTALL = S
|
||||
X = sre_compile.SRE_FLAG_VERBOSE
|
||||
VERBOSE = X
|
||||
U = sre_compile.SRE_FLAG_UNICODE
|
||||
UNICODE = U
|
||||
if sys.version_info < (3, 13):
|
||||
T = sre_compile.SRE_FLAG_TEMPLATE
|
||||
TEMPLATE = T
|
||||
if sys.version_info >= (3, 11):
|
||||
NOFLAG = 0
|
||||
|
||||
A = RegexFlag.A
|
||||
ASCII = RegexFlag.ASCII
|
||||
DEBUG = RegexFlag.DEBUG
|
||||
I = RegexFlag.I
|
||||
IGNORECASE = RegexFlag.IGNORECASE
|
||||
L = RegexFlag.L
|
||||
LOCALE = RegexFlag.LOCALE
|
||||
M = RegexFlag.M
|
||||
MULTILINE = RegexFlag.MULTILINE
|
||||
S = RegexFlag.S
|
||||
DOTALL = RegexFlag.DOTALL
|
||||
X = RegexFlag.X
|
||||
VERBOSE = RegexFlag.VERBOSE
|
||||
U = RegexFlag.U
|
||||
UNICODE = RegexFlag.UNICODE
|
||||
if sys.version_info < (3, 13):
|
||||
T = RegexFlag.T
|
||||
TEMPLATE = RegexFlag.TEMPLATE
|
||||
if sys.version_info >= (3, 11):
|
||||
NOFLAG = RegexFlag.NOFLAG
|
||||
_FlagsType: TypeAlias = int | RegexFlag
|
||||
|
||||
# Type-wise the compile() overloads are unnecessary, they could also be modeled using
|
||||
# unions in the parameter types. However mypy has a bug regarding TypeVar
|
||||
# constraints (https://github.com/python/mypy/issues/11880),
|
||||
# which limits us here because AnyStr is a constrained TypeVar.
|
||||
|
||||
# pattern arguments do *not* accept arbitrary buffers such as bytearray,
|
||||
# because the pattern must be hashable.
|
||||
@overload
|
||||
def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ...
|
||||
@overload
|
||||
def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ...
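# Illustrative usage (not part of the stub): the two compile() overloads keep
# str and bytes patterns separate, so the objects below are typed as
# Pattern[str] and Pattern[bytes] respectively. Minimal sketch using the
# standard re module.
import re

p_str = re.compile(r"(\w+)-(\d+)")       # Pattern[str]
p_bytes = re.compile(rb"\x00+")          # Pattern[bytes]
m = p_str.match("sensor-42")
if m is not None:
    print(m.group(1), m.group(2))        # -> sensor 42
print(p_bytes.sub(b".", b"a\x00\x00b"))  # -> b'a.b'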
|
||||
@overload
|
||||
def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ...
|
||||
@overload
|
||||
def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ...
|
||||
@overload
|
||||
def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ...
|
||||
@overload
|
||||
def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ...
|
||||
@overload
|
||||
def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ...
|
||||
@overload
|
||||
def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ...
|
||||
@overload
|
||||
def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: ...
|
||||
@overload
|
||||
def split(pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0) -> list[bytes | MaybeNone]: ...
|
||||
@overload
|
||||
def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ...
|
||||
@overload
|
||||
def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ...
|
||||
@overload
|
||||
def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ...
|
||||
@overload
|
||||
def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ...
|
||||
@overload
|
||||
def sub(
|
||||
pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0
|
||||
) -> str: ...
|
||||
@overload
|
||||
def sub(
|
||||
pattern: bytes | Pattern[bytes],
|
||||
repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
|
||||
string: ReadableBuffer,
|
||||
count: int = 0,
|
||||
flags: _FlagsType = 0,
|
||||
) -> bytes: ...
|
||||
@overload
|
||||
def subn(
|
||||
pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0
|
||||
) -> tuple[str, int]: ...
|
||||
@overload
|
||||
def subn(
|
||||
pattern: bytes | Pattern[bytes],
|
||||
repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
|
||||
string: ReadableBuffer,
|
||||
count: int = 0,
|
||||
flags: _FlagsType = 0,
|
||||
) -> tuple[bytes, int]: ...
|
||||
def escape(pattern: AnyStr) -> AnyStr: ...
|
||||
def purge() -> None: ...
|
||||
|
||||
if sys.version_info < (3, 13):
|
||||
def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ...
|
||||
10
.venv/lib/python3.12/site-packages/stdlib/sre_compile.pyi
Normal file
@@ -0,0 +1,10 @@
|
||||
from re import Pattern
|
||||
from sre_constants import _NamedIntConstant
|
||||
from sre_parse import SubPattern
|
||||
from typing import Any
|
||||
|
||||
MAXCODE: int
|
||||
|
||||
def dis(code: list[_NamedIntConstant]) -> None: ...
|
||||
def isstring(obj: Any) -> bool: ...
|
||||
def compile(p: str | bytes | SubPattern, flags: int = 0) -> Pattern[Any]: ...
|
||||
123
.venv/lib/python3.12/site-packages/stdlib/sre_constants.pyi
Normal file
@@ -0,0 +1,123 @@
|
||||
import sys
|
||||
from typing import Any
|
||||
from typing_extensions import Self
|
||||
|
||||
MAXGROUPS: int
|
||||
|
||||
MAGIC: int
|
||||
|
||||
class _NamedIntConstant(int):
|
||||
name: Any
|
||||
def __new__(cls, value: int, name: str) -> Self: ...
|
||||
|
||||
MAXREPEAT: _NamedIntConstant
|
||||
OPCODES: list[_NamedIntConstant]
|
||||
ATCODES: list[_NamedIntConstant]
|
||||
CHCODES: list[_NamedIntConstant]
|
||||
OP_IGNORE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
OP_LOCALE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
OP_UNICODE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
AT_MULTILINE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
CH_LOCALE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
CH_UNICODE: dict[_NamedIntConstant, _NamedIntConstant]
|
||||
if sys.version_info < (3, 13):
|
||||
SRE_FLAG_TEMPLATE: int
|
||||
SRE_FLAG_IGNORECASE: int
|
||||
SRE_FLAG_LOCALE: int
|
||||
SRE_FLAG_MULTILINE: int
|
||||
SRE_FLAG_DOTALL: int
|
||||
SRE_FLAG_UNICODE: int
|
||||
SRE_FLAG_VERBOSE: int
|
||||
SRE_FLAG_DEBUG: int
|
||||
SRE_FLAG_ASCII: int
|
||||
SRE_INFO_PREFIX: int
|
||||
SRE_INFO_LITERAL: int
|
||||
SRE_INFO_CHARSET: int
|
||||
|
||||
# Stubgen above; manually defined constants below (dynamic at runtime)
|
||||
|
||||
# from OPCODES
|
||||
FAILURE: _NamedIntConstant
|
||||
SUCCESS: _NamedIntConstant
|
||||
ANY: _NamedIntConstant
|
||||
ANY_ALL: _NamedIntConstant
|
||||
ASSERT: _NamedIntConstant
|
||||
ASSERT_NOT: _NamedIntConstant
|
||||
AT: _NamedIntConstant
|
||||
BRANCH: _NamedIntConstant
|
||||
if sys.version_info < (3, 11):
|
||||
CALL: _NamedIntConstant
|
||||
CATEGORY: _NamedIntConstant
|
||||
CHARSET: _NamedIntConstant
|
||||
BIGCHARSET: _NamedIntConstant
|
||||
GROUPREF: _NamedIntConstant
|
||||
GROUPREF_EXISTS: _NamedIntConstant
|
||||
GROUPREF_IGNORE: _NamedIntConstant
|
||||
IN: _NamedIntConstant
|
||||
IN_IGNORE: _NamedIntConstant
|
||||
INFO: _NamedIntConstant
|
||||
JUMP: _NamedIntConstant
|
||||
LITERAL: _NamedIntConstant
|
||||
LITERAL_IGNORE: _NamedIntConstant
|
||||
MARK: _NamedIntConstant
|
||||
MAX_UNTIL: _NamedIntConstant
|
||||
MIN_UNTIL: _NamedIntConstant
|
||||
NOT_LITERAL: _NamedIntConstant
|
||||
NOT_LITERAL_IGNORE: _NamedIntConstant
|
||||
NEGATE: _NamedIntConstant
|
||||
RANGE: _NamedIntConstant
|
||||
REPEAT: _NamedIntConstant
|
||||
REPEAT_ONE: _NamedIntConstant
|
||||
SUBPATTERN: _NamedIntConstant
|
||||
MIN_REPEAT_ONE: _NamedIntConstant
|
||||
if sys.version_info >= (3, 11):
|
||||
ATOMIC_GROUP: _NamedIntConstant
|
||||
POSSESSIVE_REPEAT: _NamedIntConstant
|
||||
POSSESSIVE_REPEAT_ONE: _NamedIntConstant
|
||||
RANGE_UNI_IGNORE: _NamedIntConstant
|
||||
GROUPREF_LOC_IGNORE: _NamedIntConstant
|
||||
GROUPREF_UNI_IGNORE: _NamedIntConstant
|
||||
IN_LOC_IGNORE: _NamedIntConstant
|
||||
IN_UNI_IGNORE: _NamedIntConstant
|
||||
LITERAL_LOC_IGNORE: _NamedIntConstant
|
||||
LITERAL_UNI_IGNORE: _NamedIntConstant
|
||||
NOT_LITERAL_LOC_IGNORE: _NamedIntConstant
|
||||
NOT_LITERAL_UNI_IGNORE: _NamedIntConstant
|
||||
MIN_REPEAT: _NamedIntConstant
|
||||
MAX_REPEAT: _NamedIntConstant
|
||||
|
||||
# from ATCODES
|
||||
AT_BEGINNING: _NamedIntConstant
|
||||
AT_BEGINNING_LINE: _NamedIntConstant
|
||||
AT_BEGINNING_STRING: _NamedIntConstant
|
||||
AT_BOUNDARY: _NamedIntConstant
|
||||
AT_NON_BOUNDARY: _NamedIntConstant
|
||||
AT_END: _NamedIntConstant
|
||||
AT_END_LINE: _NamedIntConstant
|
||||
AT_END_STRING: _NamedIntConstant
|
||||
AT_LOC_BOUNDARY: _NamedIntConstant
|
||||
AT_LOC_NON_BOUNDARY: _NamedIntConstant
|
||||
AT_UNI_BOUNDARY: _NamedIntConstant
|
||||
AT_UNI_NON_BOUNDARY: _NamedIntConstant
|
||||
|
||||
# from CHCODES
|
||||
CATEGORY_DIGIT: _NamedIntConstant
|
||||
CATEGORY_NOT_DIGIT: _NamedIntConstant
|
||||
CATEGORY_SPACE: _NamedIntConstant
|
||||
CATEGORY_NOT_SPACE: _NamedIntConstant
|
||||
CATEGORY_WORD: _NamedIntConstant
|
||||
CATEGORY_NOT_WORD: _NamedIntConstant
|
||||
CATEGORY_LINEBREAK: _NamedIntConstant
|
||||
CATEGORY_NOT_LINEBREAK: _NamedIntConstant
|
||||
CATEGORY_LOC_WORD: _NamedIntConstant
|
||||
CATEGORY_LOC_NOT_WORD: _NamedIntConstant
|
||||
CATEGORY_UNI_DIGIT: _NamedIntConstant
|
||||
CATEGORY_UNI_NOT_DIGIT: _NamedIntConstant
|
||||
CATEGORY_UNI_SPACE: _NamedIntConstant
|
||||
CATEGORY_UNI_NOT_SPACE: _NamedIntConstant
|
||||
CATEGORY_UNI_WORD: _NamedIntConstant
|
||||
CATEGORY_UNI_NOT_WORD: _NamedIntConstant
|
||||
CATEGORY_UNI_LINEBREAK: _NamedIntConstant
|
||||
CATEGORY_UNI_NOT_LINEBREAK: _NamedIntConstant
|
||||
104
.venv/lib/python3.12/site-packages/stdlib/sre_parse.pyi
Normal file
@@ -0,0 +1,104 @@
|
||||
import sys
|
||||
from collections.abc import Iterable
|
||||
from re import Match, Pattern as _Pattern
|
||||
from sre_constants import *
|
||||
from sre_constants import _NamedIntConstant as _NIC, error as _Error
|
||||
from typing import Any, overload
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
SPECIAL_CHARS: str
|
||||
REPEAT_CHARS: str
|
||||
DIGITS: frozenset[str]
|
||||
OCTDIGITS: frozenset[str]
|
||||
HEXDIGITS: frozenset[str]
|
||||
ASCIILETTERS: frozenset[str]
|
||||
WHITESPACE: frozenset[str]
|
||||
ESCAPES: dict[str, tuple[_NIC, int]]
|
||||
CATEGORIES: dict[str, tuple[_NIC, _NIC] | tuple[_NIC, list[tuple[_NIC, _NIC]]]]
|
||||
FLAGS: dict[str, int]
|
||||
TYPE_FLAGS: int
|
||||
GLOBAL_FLAGS: int
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
MAXWIDTH: int
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
class Verbose(Exception): ...
|
||||
|
||||
_OpSubpatternType: TypeAlias = tuple[int | None, int, int, SubPattern]
|
||||
_OpGroupRefExistsType: TypeAlias = tuple[int, SubPattern, SubPattern]
|
||||
_OpInType: TypeAlias = list[tuple[_NIC, int]]
|
||||
_OpBranchType: TypeAlias = tuple[None, list[SubPattern]]
|
||||
_AvType: TypeAlias = _OpInType | _OpBranchType | Iterable[SubPattern] | _OpGroupRefExistsType | _OpSubpatternType
|
||||
_CodeType: TypeAlias = tuple[_NIC, _AvType]
|
||||
|
||||
class State:
|
||||
flags: int
|
||||
groupdict: dict[str, int]
|
||||
groupwidths: list[int | None]
|
||||
lookbehindgroups: int | None
|
||||
@property
|
||||
def groups(self) -> int: ...
|
||||
def opengroup(self, name: str | None = ...) -> int: ...
|
||||
def closegroup(self, gid: int, p: SubPattern) -> None: ...
|
||||
def checkgroup(self, gid: int) -> bool: ...
|
||||
def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ...
|
||||
|
||||
class SubPattern:
|
||||
data: list[_CodeType]
|
||||
width: int | None
|
||||
state: State
|
||||
|
||||
def __init__(self, state: State, data: list[_CodeType] | None = None) -> None: ...
|
||||
def dump(self, level: int = 0) -> None: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __delitem__(self, index: int | slice) -> None: ...
|
||||
def __getitem__(self, index: int | slice) -> SubPattern | _CodeType: ...
|
||||
def __setitem__(self, index: int | slice, code: _CodeType) -> None: ...
|
||||
def insert(self, index: int, code: _CodeType) -> None: ...
|
||||
def append(self, code: _CodeType) -> None: ...
|
||||
def getwidth(self) -> tuple[int, int]: ...
|
||||
|
||||
class Tokenizer:
|
||||
istext: bool
|
||||
string: Any
|
||||
decoded_string: str
|
||||
index: int
|
||||
next: str | None
|
||||
def __init__(self, string: Any) -> None: ...
|
||||
def match(self, char: str) -> bool: ...
|
||||
def get(self) -> str | None: ...
|
||||
def getwhile(self, n: int, charset: Iterable[str]) -> str: ...
|
||||
def getuntil(self, terminator: str, name: str) -> str: ...
|
||||
@property
|
||||
def pos(self) -> int: ...
|
||||
def tell(self) -> int: ...
|
||||
def seek(self, index: int) -> None: ...
|
||||
def error(self, msg: str, offset: int = 0) -> _Error: ...
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
def checkgroupname(self, name: str, offset: int) -> None: ...
|
||||
elif sys.version_info >= (3, 11):
|
||||
def checkgroupname(self, name: str, offset: int, nested: int) -> None: ...
|
||||
|
||||
def fix_flags(src: str | bytes, flags: int) -> int: ...
|
||||
|
||||
_TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]]
|
||||
_TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]]
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
@overload
|
||||
def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ...
|
||||
@overload
|
||||
def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ...
|
||||
|
||||
else:
|
||||
@overload
|
||||
def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ...
|
||||
@overload
|
||||
def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ...
|
||||
|
||||
def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ...
|
||||
|
||||
if sys.version_info < (3, 12):
|
||||
def expand_template(template: _TemplateType, match: Match[Any]) -> str: ...
|
||||
643
.venv/lib/python3.12/site-packages/stdlib/ssl.pyi
Normal file
@@ -0,0 +1,643 @@
|
||||
"""
|
||||
TLS/SSL wrapper for socket objects.
|
||||
|
||||
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/ssl.html
|
||||
|
||||
CPython module: :mod:`python:ssl` https://docs.python.org/3/library/ssl.html .
|
||||
|
||||
This module provides access to Transport Layer Security (previously and
|
||||
widely known as “Secure Sockets Layer”) encryption and peer authentication
|
||||
facilities for network sockets, both client-side and server-side.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
import enum
|
||||
import socket
|
||||
import sys
|
||||
from _ssl import (
|
||||
_DEFAULT_CIPHERS as _DEFAULT_CIPHERS,
|
||||
_OPENSSL_API_VERSION as _OPENSSL_API_VERSION,
|
||||
HAS_ALPN as HAS_ALPN,
|
||||
HAS_ECDH as HAS_ECDH,
|
||||
HAS_NPN as HAS_NPN,
|
||||
HAS_SNI as HAS_SNI,
|
||||
OPENSSL_VERSION as OPENSSL_VERSION,
|
||||
OPENSSL_VERSION_INFO as OPENSSL_VERSION_INFO,
|
||||
OPENSSL_VERSION_NUMBER as OPENSSL_VERSION_NUMBER,
|
||||
HAS_SSLv2 as HAS_SSLv2,
|
||||
HAS_SSLv3 as HAS_SSLv3,
|
||||
HAS_TLSv1 as HAS_TLSv1,
|
||||
HAS_TLSv1_1 as HAS_TLSv1_1,
|
||||
HAS_TLSv1_2 as HAS_TLSv1_2,
|
||||
HAS_TLSv1_3 as HAS_TLSv1_3,
|
||||
MemoryBIO as MemoryBIO,
|
||||
RAND_add as RAND_add,
|
||||
RAND_bytes as RAND_bytes,
|
||||
RAND_status as RAND_status,
|
||||
SSLSession as SSLSession,
|
||||
_PasswordType as _PasswordType, # typeshed only, but re-export for other type stubs to use
|
||||
_SSLContext,
|
||||
)
|
||||
from _typeshed import Incomplete, ReadableBuffer, StrOrBytesPath, WriteableBuffer
|
||||
from collections.abc import Callable, Iterable
|
||||
from typing import Any, Literal, NamedTuple, TypedDict, overload
|
||||
from typing_extensions import Awaitable, TypeVar, Never, Self, TypeAlias
|
||||
from _mpy_shed import StrOrBytesPath, mp_available
|
||||
from tls import *
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
from _ssl import HAS_PSK as HAS_PSK
|
||||
|
||||
if True:
|
||||
from _ssl import RAND_pseudo_bytes as RAND_pseudo_bytes
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
from _ssl import RAND_egd as RAND_egd
|
||||
|
||||
if sys.platform == "win32":
|
||||
from _ssl import enum_certificates as enum_certificates, enum_crls as enum_crls
|
||||
|
||||
_PCTRTT: TypeAlias = tuple[tuple[str, str], ...]
|
||||
_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...]
|
||||
_PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT]
|
||||
_PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None
|
||||
_SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None]
|
||||
|
||||
# socket_error = OSError
|
||||
|
||||
class _Cipher(TypedDict):
|
||||
aead: bool
|
||||
alg_bits: int
|
||||
auth: str
|
||||
description: str
|
||||
digest: str | None
|
||||
id: int
|
||||
kea: str
|
||||
name: str
|
||||
protocol: str
|
||||
strength_bits: int
|
||||
symmetric: str
|
||||
|
||||
class SSLError(OSError):
|
||||
library: str
|
||||
reason: str
|
||||
|
||||
class SSLZeroReturnError(SSLError): ...
|
||||
class SSLWantReadError(SSLError): ...
|
||||
class SSLWantWriteError(SSLError): ...
|
||||
class SSLSyscallError(SSLError): ...
|
||||
class SSLEOFError(SSLError): ...
|
||||
|
||||
class SSLCertVerificationError(SSLError, ValueError):
|
||||
verify_code: int
|
||||
verify_message: str
|
||||
|
||||
# CertificateError = SSLCertVerificationError
|
||||
|
||||
if True:
|
||||
|
||||
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
# End duplicated section
|
||||
# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
||||
|
||||
@mp_available() # force merge
|
||||
def wrap_socket(
|
||||
sock: socket.socket,
|
||||
*,
|
||||
server_side: bool = False,
|
||||
key: Incomplete = None,
|
||||
cert: Incomplete = None,
|
||||
cert_reqs: int = 0,
|
||||
cadata: bytes | None = None,
|
||||
server_hostname: str | None = None,
|
||||
do_handshake: bool = True,
|
||||
) -> SSLSocket:
|
||||
"""
|
||||
Wrap the given *sock* and return a new wrapped-socket object. Internally this
function first creates an `SSLContext` and then calls the `SSLContext.wrap_socket`
|
||||
method on that context object. The arguments *sock*, *server_side* and *server_hostname* are
|
||||
passed through unchanged to the method call. The argument *do_handshake* is passed through as
|
||||
*do_handshake_on_connect*. The remaining arguments have the following behaviour:
|
||||
|
||||
- *cert_reqs* determines whether the peer (server or client) must present a valid certificate.
|
||||
Note that for mbedtls based ports, ``ssl.CERT_NONE`` and ``ssl.CERT_OPTIONAL`` will not
|
||||
validate any certificate, only ``ssl.CERT_REQUIRED`` will.
|
||||
|
||||
- *cadata* is a bytes object containing the CA certificate chain (in DER format) that will
|
||||
validate the peer's certificate. Currently only a single DER-encoded certificate is supported.
|
||||
|
||||
Depending on the underlying module implementation in a particular
|
||||
:term:`MicroPython port`, some or all keyword arguments above may not be supported.
|
||||
"""
|
||||
...
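# Illustrative usage (not part of the stub): a client-side sketch of the
# MicroPython-style wrap_socket() above. "example.com" is a placeholder host;
# with the default cert_reqs (CERT_NONE on mbedtls ports) no certificate
# validation is performed.
import socket
import ssl

addr = socket.getaddrinfo("example.com", 443)[0][-1]
raw = socket.socket()
raw.connect(addr)
tls = ssl.wrap_socket(raw, server_hostname="example.com")
tls.write(b"GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")
print(tls.read(64))
tls.close()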
|
||||
|
||||
def __mpy_has_no_create_default_context(
|
||||
purpose: Purpose = ...,
|
||||
*,
|
||||
cafile: StrOrBytesPath | None = None,
|
||||
capath: StrOrBytesPath | None = None,
|
||||
cadata: str | ReadableBuffer | None = None,
|
||||
) -> SSLContext: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
def _create_unverified_context(
|
||||
protocol: int | None = None,
|
||||
*,
|
||||
cert_reqs: int = ...,
|
||||
check_hostname: bool = False,
|
||||
purpose: Purpose = ...,
|
||||
certfile: StrOrBytesPath | None = None,
|
||||
keyfile: StrOrBytesPath | None = None,
|
||||
cafile: StrOrBytesPath | None = None,
|
||||
capath: StrOrBytesPath | None = None,
|
||||
cadata: str | ReadableBuffer | None = None,
|
||||
) -> SSLContext: ...
|
||||
|
||||
else:
|
||||
def _create_unverified_context(
|
||||
protocol: int = ...,
|
||||
*,
|
||||
cert_reqs: int = ...,
|
||||
check_hostname: bool = False,
|
||||
purpose: Purpose = ...,
|
||||
certfile: StrOrBytesPath | None = None,
|
||||
keyfile: StrOrBytesPath | None = None,
|
||||
cafile: StrOrBytesPath | None = None,
|
||||
capath: StrOrBytesPath | None = None,
|
||||
cadata: str | ReadableBuffer | None = None,
|
||||
) -> SSLContext: ...
|
||||
|
||||
_create_default_https_context: Callable[..., SSLContext]
|
||||
|
||||
if True:
|
||||
def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ...
|
||||
|
||||
def cert_time_to_seconds(cert_time: str) -> int: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ...) -> str: ...
|
||||
|
||||
else:
|
||||
def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ...
|
||||
|
||||
def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ...
|
||||
def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ...
|
||||
|
||||
class DefaultVerifyPaths(NamedTuple):
|
||||
cafile: str
|
||||
capath: str
|
||||
openssl_cafile_env: str
|
||||
openssl_cafile: str
|
||||
openssl_capath_env: str
|
||||
openssl_capath: str
|
||||
|
||||
def get_default_verify_paths() -> DefaultVerifyPaths: ...
|
||||
|
||||
class VerifyMode(enum.IntEnum):
|
||||
CERT_NONE = 0
|
||||
CERT_OPTIONAL = 1
|
||||
CERT_REQUIRED = 2
|
||||
|
||||
CERT_NONE: VerifyMode
|
||||
CERT_OPTIONAL: VerifyMode
|
||||
CERT_REQUIRED: VerifyMode
|
||||
|
||||
class VerifyFlags(enum.IntFlag):
|
||||
VERIFY_DEFAULT = 0
|
||||
VERIFY_CRL_CHECK_LEAF = 4
|
||||
VERIFY_CRL_CHECK_CHAIN = 12
|
||||
VERIFY_X509_STRICT = 32
|
||||
VERIFY_X509_TRUSTED_FIRST = 32768
|
||||
if sys.version_info >= (3, 10):
|
||||
VERIFY_ALLOW_PROXY_CERTS = 64
|
||||
VERIFY_X509_PARTIAL_CHAIN = 524288
|
||||
|
||||
# VERIFY_DEFAULT: VerifyFlags
|
||||
# VERIFY_CRL_CHECK_LEAF: VerifyFlags
|
||||
# VERIFY_CRL_CHECK_CHAIN: VerifyFlags
|
||||
# VERIFY_X509_STRICT: VerifyFlags
|
||||
# VERIFY_X509_TRUSTED_FIRST: VerifyFlags
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
VERIFY_ALLOW_PROXY_CERTS: VerifyFlags
|
||||
VERIFY_X509_PARTIAL_CHAIN: VerifyFlags
|
||||
|
||||
class _SSLMethod(enum.IntEnum):
|
||||
PROTOCOL_SSLv23 = 2
|
||||
PROTOCOL_SSLv2 = ...
|
||||
PROTOCOL_SSLv3 = ...
|
||||
PROTOCOL_TLSv1 = 3
|
||||
PROTOCOL_TLSv1_1 = 4
|
||||
PROTOCOL_TLSv1_2 = 5
|
||||
PROTOCOL_TLS = 2
|
||||
PROTOCOL_TLS_CLIENT = 16
|
||||
PROTOCOL_TLS_SERVER = 17
|
||||
|
||||
# PROTOCOL_SSLv23: _SSLMethod
|
||||
# PROTOCOL_SSLv2: _SSLMethod
|
||||
# PROTOCOL_SSLv3: _SSLMethod
|
||||
# PROTOCOL_TLSv1: _SSLMethod
|
||||
# PROTOCOL_TLSv1_1: _SSLMethod
|
||||
# PROTOCOL_TLSv1_2: _SSLMethod
|
||||
# PROTOCOL_TLS: _SSLMethod
|
||||
PROTOCOL_TLS_CLIENT: _SSLMethod
|
||||
PROTOCOL_TLS_SERVER: _SSLMethod
|
||||
|
||||
class Options(enum.IntFlag):
|
||||
OP_ALL = 2147483728
|
||||
OP_NO_SSLv2 = 0
|
||||
OP_NO_SSLv3 = 33554432
|
||||
OP_NO_TLSv1 = 67108864
|
||||
OP_NO_TLSv1_1 = 268435456
|
||||
OP_NO_TLSv1_2 = 134217728
|
||||
OP_NO_TLSv1_3 = 536870912
|
||||
OP_CIPHER_SERVER_PREFERENCE = 4194304
|
||||
OP_SINGLE_DH_USE = 0
|
||||
OP_SINGLE_ECDH_USE = 0
|
||||
OP_NO_COMPRESSION = 131072
|
||||
OP_NO_TICKET = 16384
|
||||
OP_NO_RENEGOTIATION = 1073741824
|
||||
OP_ENABLE_MIDDLEBOX_COMPAT = 1048576
|
||||
if sys.version_info >= (3, 12):
|
||||
OP_LEGACY_SERVER_CONNECT = 4
|
||||
OP_ENABLE_KTLS = 8
|
||||
if sys.version_info >= (3, 11) or sys.platform == "linux":
|
||||
OP_IGNORE_UNEXPECTED_EOF = 128
|
||||
|
||||
# OP_ALL: Options
|
||||
# OP_NO_SSLv2: Options
|
||||
# OP_NO_SSLv3: Options
|
||||
# OP_NO_TLSv1: Options
|
||||
# OP_NO_TLSv1_1: Options
|
||||
# OP_NO_TLSv1_2: Options
|
||||
# OP_NO_TLSv1_3: Options
|
||||
# OP_CIPHER_SERVER_PREFERENCE: Options
|
||||
# OP_SINGLE_DH_USE: Options
|
||||
# OP_SINGLE_ECDH_USE: Options
|
||||
# OP_NO_COMPRESSION: Options
|
||||
# OP_NO_TICKET: Options
|
||||
# OP_NO_RENEGOTIATION: Options
|
||||
# OP_ENABLE_MIDDLEBOX_COMPAT: Options
|
||||
if sys.version_info >= (3, 12):
|
||||
OP_LEGACY_SERVER_CONNECT: Options
|
||||
OP_ENABLE_KTLS: Options
|
||||
if sys.version_info >= (3, 11) or sys.platform == "linux":
|
||||
OP_IGNORE_UNEXPECTED_EOF: Options
|
||||
|
||||
# HAS_NEVER_CHECK_COMMON_NAME: bool
|
||||
|
||||
# CHANNEL_BINDING_TYPES: list[str]
|
||||
|
||||
class AlertDescription(enum.IntEnum):
|
||||
ALERT_DESCRIPTION_ACCESS_DENIED = 49
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE = 42
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = 114
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE = 113
|
||||
ALERT_DESCRIPTION_BAD_RECORD_MAC = 20
|
||||
ALERT_DESCRIPTION_CERTIFICATE_EXPIRED = 45
|
||||
ALERT_DESCRIPTION_CERTIFICATE_REVOKED = 44
|
||||
ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN = 46
|
||||
ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE = 111
|
||||
ALERT_DESCRIPTION_CLOSE_NOTIFY = 0
|
||||
ALERT_DESCRIPTION_DECODE_ERROR = 50
|
||||
ALERT_DESCRIPTION_DECOMPRESSION_FAILURE = 30
|
||||
ALERT_DESCRIPTION_DECRYPT_ERROR = 51
|
||||
ALERT_DESCRIPTION_HANDSHAKE_FAILURE = 40
|
||||
ALERT_DESCRIPTION_ILLEGAL_PARAMETER = 47
|
||||
ALERT_DESCRIPTION_INSUFFICIENT_SECURITY = 71
|
||||
ALERT_DESCRIPTION_INTERNAL_ERROR = 80
|
||||
ALERT_DESCRIPTION_NO_RENEGOTIATION = 100
|
||||
ALERT_DESCRIPTION_PROTOCOL_VERSION = 70
|
||||
ALERT_DESCRIPTION_RECORD_OVERFLOW = 22
|
||||
ALERT_DESCRIPTION_UNEXPECTED_MESSAGE = 10
|
||||
ALERT_DESCRIPTION_UNKNOWN_CA = 48
|
||||
ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY = 115
|
||||
ALERT_DESCRIPTION_UNRECOGNIZED_NAME = 112
|
||||
ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE = 43
|
||||
ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION = 110
|
||||
ALERT_DESCRIPTION_USER_CANCELLED = 90
|
||||
|
||||
# ALERT_DESCRIPTION_HANDSHAKE_FAILURE: AlertDescription
|
||||
# ALERT_DESCRIPTION_INTERNAL_ERROR: AlertDescription
|
||||
# ALERT_DESCRIPTION_ACCESS_DENIED: AlertDescription
|
||||
# ALERT_DESCRIPTION_BAD_CERTIFICATE: AlertDescription
|
||||
# ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: AlertDescription
|
||||
# ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: AlertDescription
|
||||
# ALERT_DESCRIPTION_BAD_RECORD_MAC: AlertDescription
|
||||
# ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: AlertDescription
|
||||
# ALERT_DESCRIPTION_CERTIFICATE_REVOKED: AlertDescription
|
||||
# ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: AlertDescription
|
||||
# ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: AlertDescription
|
||||
# ALERT_DESCRIPTION_CLOSE_NOTIFY: AlertDescription
|
||||
# ALERT_DESCRIPTION_DECODE_ERROR: AlertDescription
|
||||
# ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: AlertDescription
|
||||
# ALERT_DESCRIPTION_DECRYPT_ERROR: AlertDescription
|
||||
# ALERT_DESCRIPTION_ILLEGAL_PARAMETER: AlertDescription
|
||||
# ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: AlertDescription
|
||||
# ALERT_DESCRIPTION_NO_RENEGOTIATION: AlertDescription
|
||||
# ALERT_DESCRIPTION_PROTOCOL_VERSION: AlertDescription
|
||||
# ALERT_DESCRIPTION_RECORD_OVERFLOW: AlertDescription
|
||||
# ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: AlertDescription
|
||||
# ALERT_DESCRIPTION_UNKNOWN_CA: AlertDescription
|
||||
# ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: AlertDescription
|
||||
# ALERT_DESCRIPTION_UNRECOGNIZED_NAME: AlertDescription
|
||||
# ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: AlertDescription
|
||||
# ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: AlertDescription
|
||||
# ALERT_DESCRIPTION_USER_CANCELLED: AlertDescription
|
||||
|
||||
class _ASN1ObjectBase(NamedTuple):
|
||||
nid: int
|
||||
shortname: str
|
||||
longname: str
|
||||
oid: str
|
||||
|
||||
class _ASN1Object(_ASN1ObjectBase):
|
||||
def __new__(cls, oid: str) -> Self: ...
|
||||
@classmethod
|
||||
def fromnid(cls, nid: int) -> Self: ...
|
||||
@classmethod
|
||||
def fromname(cls, name: str) -> Self: ...
|
||||
|
||||
class Purpose(_ASN1Object, enum.Enum):
|
||||
SERVER_AUTH = (129, "serverAuth", "TLS Web Server Authentication", "1.3.6.1.5.5.7.3.2") # pyright: ignore[reportCallIssue]
|
||||
CLIENT_AUTH = (130, "clientAuth", "TLS Web Client Authentication", "1.3.6.1.5.5.7.3.1") # pyright: ignore[reportCallIssue]
|
||||
|
||||
class SSLSocket:
|
||||
context: SSLContext
|
||||
server_side: bool
|
||||
server_hostname: str | None
|
||||
session: SSLSession | None
|
||||
@property
|
||||
def session_reused(self) -> bool | None: ...
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
|
||||
def connect(self, addr: socket._Address) -> None: ...
|
||||
def connect_ex(self, addr: socket._Address) -> int: ...
|
||||
# ifdef MBEDTLS_SSL_PROTO_DTLS
|
||||
@mp_available(macro="MBEDTLS_SSL_PROTO_DTLS") # force merge
|
||||
def recv(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@mp_available(macro="MBEDTLS_SSL_PROTO_DTLS") # force merge
|
||||
def recv_into(self, *argv, **kwargs) -> Incomplete: ...
|
||||
def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ...
|
||||
def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> tuple[int, socket._RetAddress]: ...
|
||||
@mp_available(macro="MBEDTLS_SSL_PROTO_DTLS") # force merge
|
||||
def send(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@mp_available(macro="MBEDTLS_SSL_PROTO_DTLS") # force merge
|
||||
def sendall(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@overload
|
||||
def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = None) -> int: ...
|
||||
@overload
|
||||
def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ...
|
||||
def shutdown(self, how: int) -> None: ...
|
||||
# TODO : SSLSocket is undocumented
|
||||
# ref: micropython\extmod\modtls_axtls.c ( read ... close)
|
||||
|
||||
# repos\micropython\extmod\modtls_mbedtls.c
|
||||
@mp_available() # force merge
|
||||
def read(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@mp_available() # force merge
|
||||
def write(self, *argv, **kwargs) -> Incomplete: ...
|
||||
def do_handshake(self, block: bool = False) -> None: ... # block is undocumented
|
||||
@overload
|
||||
def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ...
|
||||
@overload
|
||||
def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ...
|
||||
@overload
|
||||
def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ...
|
||||
# endif
|
||||
# ifdef (MBEDTLS_SSL_KEEP_PEER_CERTIFICATE)
|
||||
@mp_available(macro="MBEDTLS_SSL_KEEP_PEER_CERTIFICATE") # force merge
|
||||
def getpeercert(self, *argv, **kwargs) -> Incomplete: ...
|
||||
# endif
|
||||
@mp_available() # force merge
|
||||
def cipher(self, *argv, **kwargs) -> Incomplete: ...
|
||||
def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ...
|
||||
def compression(self) -> str | None: ...
|
||||
def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ...
|
||||
def selected_alpn_protocol(self) -> str | None: ...
|
||||
def selected_npn_protocol(self) -> str | None: ...
|
||||
def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ...
|
||||
def unwrap(self) -> socket.socket: ...
|
||||
def version(self) -> str | None: ...
|
||||
def pending(self) -> int: ...
|
||||
def verify_client_post_handshake(self) -> None: ...
|
||||
# These methods always raise `NotImplementedError`:
|
||||
def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override]
|
||||
def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override]
|
||||
def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override]
|
||||
if sys.version_info >= (3, 13):
|
||||
def get_verified_chain(self) -> list[bytes]: ...
|
||||
def get_unverified_chain(self) -> list[bytes]: ...
|
||||
|
||||
@mp_available() # force merge
|
||||
def readinto(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@mp_available() # force merge
|
||||
def readline(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@mp_available() # force merge
|
||||
def setblocking(self, *argv, **kwargs) -> Incomplete: ...
|
||||
@mp_available() # force merge
|
||||
def close(self, *argv, **kwargs) -> Incomplete: ...
|
||||
# if MICROPY_PY_SSL_FINALISER
|
||||
@mp_available(macro="MICROPY_PY_SSL_FINALISER") # force merge
|
||||
def __del__(self, *argv, **kwargs) -> Incomplete: ...
|
||||
# endif
|
||||
# ifdef MICROPY_UNIX_COVERAGE
|
||||
@mp_available(macro="MICROPY_UNIX_COVERAGE") # force merge
|
||||
def ioctl(self, *argv, **kwargs) -> Incomplete: ...
|
||||
|
||||
class TLSVersion(enum.IntEnum):
|
||||
MINIMUM_SUPPORTED = -2
|
||||
MAXIMUM_SUPPORTED = -1
|
||||
SSLv3 = 768
|
||||
TLSv1 = 769
|
||||
TLSv1_1 = 770
|
||||
TLSv1_2 = 771
|
||||
TLSv1_3 = 772
|
||||
|
||||
class SSLContext:
|
||||
"""
|
||||
Create a new SSLContext instance. The *protocol* argument must be one of the ``PROTOCOL_*``
|
||||
constants.
|
||||
"""
|
||||
|
||||
options: Options
|
||||
verify_flags: VerifyFlags
|
||||
verify_mode: VerifyMode
|
||||
@property
|
||||
def protocol(self) -> _SSLMethod: ... # type: ignore[override]
|
||||
hostname_checks_common_name: bool
|
||||
maximum_version: TLSVersion
|
||||
minimum_version: TLSVersion
|
||||
# The following two attributes have class-level defaults.
|
||||
# However, the docs explicitly state that it's OK to override these attributes on instances,
|
||||
# so making these ClassVars wouldn't be appropriate
|
||||
sslobject_class: type[SSLObject]
|
||||
sslsocket_class: type[SSLSocket]
|
||||
keylog_filename: str
|
||||
post_handshake_auth: bool
|
||||
if sys.version_info >= (3, 10):
|
||||
security_level: int
|
||||
if sys.version_info >= (3, 10):
|
||||
# Using the default (None) for the `protocol` parameter is deprecated,
|
||||
# but there isn't a good way of marking that in the stub unless/until PEP 702 is accepted
|
||||
def __new__(cls, protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ...
|
||||
else:
|
||||
def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ...
|
||||
|
||||
def load_default_certs(self, purpose: Purpose = ...) -> None: ...
|
||||
def load_verify_locations(self, cafile=None, cadata: bytes | None = None) -> None:
|
||||
"""
|
||||
Load the CA certificate chain that will validate the peer's certificate.
|
||||
*cafile* is the file path of the CA certificates. *cadata* is a bytes object
|
||||
containing the CA certificates. Only one of these arguments should be provided.
|
||||
"""
|
||||
...
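# Illustrative usage (not part of the stub): loading a DER-encoded CA
# certificate so that the peer can be validated. The file name is a
# placeholder; pass either *cafile* or *cadata*, not both.
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.verify_mode = ssl.CERT_REQUIRED
with open("ca_cert.der", "rb") as f:
    ctx.load_verify_locations(cadata=f.read())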
|
||||
|
||||
@overload
|
||||
def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ...
|
||||
@overload
|
||||
def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ...
|
||||
@overload
|
||||
def get_ca_certs(self, binary_form: bool = False) -> Any: ...
|
||||
def get_ciphers(self) -> list[_Cipher]:
|
||||
"""
|
||||
Get a list of enabled ciphers, returned as a list of strings.
|
||||
"""
|
||||
...
|
||||
|
||||
def set_default_verify_paths(self) -> None: ...
|
||||
def set_ciphers(self, ciphers) -> None:
|
||||
"""
|
||||
Set the available ciphers for sockets created with this context. *ciphers* should be
|
||||
a list of strings in the `IANA cipher suite format <https://wiki.mozilla.org/Security/Cipher_Suites>`_ .
|
||||
"""
|
||||
...
|
||||
|
||||
def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ...
|
||||
def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ...
|
||||
def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ...
|
||||
def load_dh_params(self, path: str, /) -> None: ...
|
||||
def set_ecdh_curve(self, name: str, /) -> None: ...
|
||||
def wrap_socket(
|
||||
self,
|
||||
sock: socket.socket,
|
||||
*,
|
||||
server_side: bool = False,
|
||||
do_handshake_on_connect: bool = True,
|
||||
server_hostname: str | None = None,
|
||||
) -> SSLSocket:
|
||||
"""
|
||||
Takes a `stream` *sock* (usually a socket.socket instance of ``SOCK_STREAM`` type),
|
||||
and returns an instance of ssl.SSLSocket, wrapping the underlying stream.
|
||||
The returned object has the usual `stream` interface methods like
|
||||
``read()``, ``write()``, etc.
|
||||
|
||||
- *server_side* selects whether the wrapped socket is on the server or client side.
|
||||
A server-side SSL socket should be created from a normal socket returned from
|
||||
:meth:`~socket.socket.accept()` on a non-SSL listening server socket.
|
||||
|
||||
- *do_handshake_on_connect* determines whether the handshake is done as part of the ``wrap_socket``
|
||||
or whether it is deferred to be done as part of the initial reads or writes.
|
||||
For blocking sockets, doing the handshake immediately is standard. For non-blocking
|
||||
sockets (i.e. when the *sock* passed into ``wrap_socket`` is in non-blocking mode)
|
||||
the handshake should generally be deferred because otherwise ``wrap_socket`` blocks
|
||||
until it completes. Note that in AXTLS the handshake can be deferred until the first
|
||||
read or write but it then blocks until completion.
|
||||
|
||||
- *server_hostname* is for use as a client, and sets the hostname to check against the received
|
||||
server certificate. It also sets the name for Server Name Indication (SNI), allowing the server
|
||||
to present the proper certificate.
|
||||
|
||||
- *client_id* is a MicroPython-specific extension argument used only when implementing a DTLS
|
||||
Server. See :ref:`dtls` for details.
|
||||
"""
|
||||
...
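# Illustrative usage (not part of the stub): server-side sketch of
# wrap_socket() above. The certificate/key file names and the port are
# placeholders; the wrapped socket is created from the plain socket returned
# by accept() on a non-SSL listening socket, as described in the docstring.
import socket
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.load_cert_chain("server_cert.der", "server_key.der")

lsock = socket.socket()
lsock.bind(socket.getaddrinfo("0.0.0.0", 8443)[0][-1])
lsock.listen(1)
client, _ = lsock.accept()
tls = ctx.wrap_socket(client, server_side=True)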
|
||||
|
||||
def wrap_bio(
|
||||
self,
|
||||
incoming: MemoryBIO,
|
||||
outgoing: MemoryBIO,
|
||||
server_side: bool = False,
|
||||
server_hostname: str | bytes | None = None,
|
||||
session: SSLSession | None = None,
|
||||
) -> SSLObject: ...
|
||||
@mp_available() # force merge
|
||||
def load_cert_chain(self, certfile, keyfile) -> None:
|
||||
"""
|
||||
Load a private key and the corresponding certificate. The *certfile* is a string
|
||||
with the file path of the certificate. The *keyfile* is a string with the file path
|
||||
of the private key.
|
||||
|
||||
Admonition: Difference to CPython
|
||||
:class: attention
|
||||
|
||||
MicroPython extension: *certfile* and *keyfile* can be bytes objects instead of
|
||||
strings, in which case they are interpreted as the actual certificate/key data.
|
||||
"""
|
||||
...
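# Illustrative usage (not part of the stub): the MicroPython extension noted
# above allows passing the certificate and key as bytes objects (e.g. DER
# data read from flash). The file names below are placeholders.
import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
with open("server_cert.der", "rb") as c, open("server_key.der", "rb") as k:
    ctx.load_cert_chain(c.read(), k.read())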
|
||||
|
||||
class SSLObject:
|
||||
context: SSLContext
|
||||
@property
|
||||
def server_side(self) -> bool: ...
|
||||
@property
|
||||
def server_hostname(self) -> str | None: ...
|
||||
session: SSLSession | None
|
||||
@property
|
||||
def session_reused(self) -> bool: ...
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
|
||||
def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ...
|
||||
def write(self, data: ReadableBuffer) -> int: ...
|
||||
@overload
|
||||
def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ...
|
||||
@overload
|
||||
def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ...
|
||||
@overload
|
||||
def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ...
|
||||
def selected_alpn_protocol(self) -> str | None: ...
|
||||
def selected_npn_protocol(self) -> str | None: ...
|
||||
def cipher(self) -> tuple[str, str, int] | None: ...
|
||||
def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ...
|
||||
def compression(self) -> str | None: ...
|
||||
def pending(self) -> int: ...
|
||||
def do_handshake(self) -> None: ...
|
||||
def unwrap(self) -> None: ...
|
||||
def version(self) -> str | None: ...
|
||||
def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ...
|
||||
def verify_client_post_handshake(self) -> None: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def get_verified_chain(self) -> list[bytes]: ...
|
||||
def get_unverified_chain(self) -> list[bytes]: ...
|
||||
|
||||
class SSLErrorNumber(enum.IntEnum):
|
||||
SSL_ERROR_EOF = 8
|
||||
SSL_ERROR_INVALID_ERROR_CODE = 10
|
||||
SSL_ERROR_SSL = 1
|
||||
SSL_ERROR_SYSCALL = 5
|
||||
SSL_ERROR_WANT_CONNECT = 7
|
||||
SSL_ERROR_WANT_READ = 2
|
||||
SSL_ERROR_WANT_WRITE = 3
|
||||
SSL_ERROR_WANT_X509_LOOKUP = 4
|
||||
SSL_ERROR_ZERO_RETURN = 6
|
||||
|
||||
# SSL_ERROR_EOF: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_INVALID_ERROR_CODE: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_SSL: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_SYSCALL: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_WANT_CONNECT: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_WANT_READ: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_WANT_WRITE: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_WANT_X509_LOOKUP: SSLErrorNumber # undocumented
|
||||
# SSL_ERROR_ZERO_RETURN: SSLErrorNumber # undocumented
|
||||
|
||||
def get_protocol_name(protocol_code: int) -> str: ...
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
AF_INET: int
|
||||
# PEM_FOOTER: str
|
||||
# PEM_HEADER: str
|
||||
# SOCK_STREAM: int
|
||||
# SOL_SOCKET: int
|
||||
# SO_TYPE: int
|
||||
PROTOCOL_DTLS_CLIENT: Incomplete
|
||||
PROTOCOL_DTLS_SERVER: Incomplete
|
||||
MBEDTLS_VERSION: str = "Mbed TLS 3.6.0"
|
||||
120
.venv/lib/python3.12/site-packages/stdlib/struct.pyi
Normal file
@@ -0,0 +1,120 @@
|
||||
"""
|
||||
Pack and unpack primitive data types.
|
||||
|
||||
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/struct.html
|
||||
|
||||
CPython module: :mod:`python:struct` https://docs.python.org/3/library/struct.html .
|
||||
|
||||
The following byte orders are supported:
|
||||
|
||||
+-----------+------------------------+----------+-----------+
|
||||
| Character | Byte order | Size | Alignment |
|
||||
+===========+========================+==========+===========+
|
||||
| @ | native | native | native |
|
||||
+-----------+------------------------+----------+-----------+
|
||||
| < | little-endian | standard | none |
|
||||
+-----------+------------------------+----------+-----------+
|
||||
| > | big-endian | standard | none |
|
||||
+-----------+------------------------+----------+-----------+
|
||||
| ! | network (= big-endian) | standard | none |
|
||||
+-----------+------------------------+----------+-----------+
|
||||
|
||||
The following data types are supported:
|
||||
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| Format | C Type | Python type | Standard size |
|
||||
+========+====================+===================+===============+
|
||||
| b | signed char | integer | 1 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| B | unsigned char | integer | 1 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| h | short | integer | 2 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| H | unsigned short | integer | 2 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| i | int | integer (`1<fn>`) | 4 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| I | unsigned int | integer (`1<fn>`) | 4 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| l | long | integer (`1<fn>`) | 4 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| L | unsigned long | integer (`1<fn>`) | 4 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| q | long long | integer (`1<fn>`) | 8 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| Q | unsigned long long | integer (`1<fn>`) | 8 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| e | n/a (half-float) | float (`2<fn>`) | 2 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| f | float | float (`2<fn>`) | 4 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| d | double | float (`2<fn>`) | 8 |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| s | char[] | bytes | |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
| P | void * | integer | |
|
||||
+--------+--------------------+-------------------+---------------+
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer
|
||||
from collections.abc import Iterator
|
||||
from typing import Any
|
||||
from _mpy_shed import AnyReadableBuf, AnyWritableBuf
|
||||
from typing_extensions import Awaitable, TypeAlias, TypeVar
|
||||
|
||||
__all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"]
|
||||
|
||||
class error(Exception): ...
|
||||
|
||||
def pack(fmt: str | bytes, /, *v: Any) -> bytes:
|
||||
"""
|
||||
Pack the values *v1*, *v2*, ... according to the format string *fmt*.
|
||||
The return value is a bytes object encoding the values.
|
||||
"""
|
||||
...
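# Illustrative usage (not part of the stub): packing a little-endian record
# (uint16 id, uint16 flags, uint32 counter) and unpacking it again.
import struct

rec = struct.pack("<HHI", 7, 0x01, 123456)
print(len(rec), struct.calcsize("<HHI"))  # -> 8 8
print(struct.unpack("<HHI", rec))         # -> (7, 1, 123456)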
|
||||
|
||||
def pack_into(fmt: str | bytes, buffer: AnyWritableBuf, offset: int, /, *v: Any) -> None:
|
||||
"""
|
||||
Pack the values *v1*, *v2*, ... according to the format string *fmt*
|
||||
into a *buffer* starting at *offset*. *offset* may be negative to count
|
||||
from the end of *buffer*.
|
||||
"""
|
||||
...
|
||||
|
||||
def unpack(fmt: str | bytes, data: AnyReadableBuf, /) -> tuple[Any, ...]:
|
||||
"""
|
||||
Unpack from the *data* according to the format string *fmt*.
|
||||
The return value is a tuple of the unpacked values.
|
||||
"""
|
||||
...
|
||||
|
||||
def unpack_from(fmt: str | bytes, data: AnyReadableBuf, offset: int = 0, /) -> tuple[Any, ...]:
|
||||
"""
|
||||
Unpack from the *data* starting at *offset* according to the format string
|
||||
*fmt*. *offset* may be negative to count from the end of *data*. The return
|
||||
value is a tuple of the unpacked values.
|
||||
"""
|
||||
...
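# Illustrative usage (not part of the stub): writing into a pre-allocated
# buffer at an offset and reading it back; a negative offset counts from the
# end of the buffer, as described above.
import struct

buf = bytearray(8)
struct.pack_into("<I", buf, 4, 0xDEADBEEF)  # write 4 bytes starting at offset 4
print(struct.unpack_from("<I", buf, -4))    # -> (3735928559,)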
|
||||
|
||||
def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ...
|
||||
def calcsize(
|
||||
fmt: str | bytes,
|
||||
/,
|
||||
) -> int:
|
||||
"""
|
||||
Return the number of bytes needed to store the given *fmt*.
|
||||
"""
|
||||
...
|
||||
|
||||
class Struct:
|
||||
@property
|
||||
def format(self) -> str: ...
|
||||
@property
|
||||
def size(self) -> int: ...
|
||||
def __init__(self, format: str | bytes) -> None: ...
|
||||
def pack(self, *v: Any) -> bytes: ...
|
||||
def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ...
|
||||
def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ...
|
||||
def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ...
|
||||
def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ...
|
||||
786
.venv/lib/python3.12/site-packages/stdlib/sys/__init__.pyi
Normal file
@@ -0,0 +1,786 @@
|
||||
"""
|
||||
System specific functions.
|
||||
|
||||
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/sys.html
|
||||
|
||||
CPython module: :mod:`python:sys` https://docs.python.org/3/library/sys.html .
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
import sys
|
||||
from _typeshed import Incomplete, MaybeNone, OptExcInfo, ProfileFunction, TraceFunction, structseq
|
||||
from _typeshed.importlib import MetaPathFinderProtocol, PathEntryFinderProtocol
|
||||
from builtins import object as _object
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
from io import TextIOWrapper
|
||||
from types import FrameType, ModuleType, TracebackType
|
||||
from typing import Callable, overload, Any, Final, Literal, NoReturn, Protocol, TextIO, TypeVar, final
|
||||
from typing_extensions import Awaitable, TypeVar, TypeAlias
|
||||
from _mpy_shed import IOBase_mp, _mp_implementation
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
# see https://github.com/python/typeshed/issues/8513#issue-1333671093 for the rationale behind this alias
|
||||
_ExitCode: TypeAlias = str | int | None
|
||||
_OptExcInfo: TypeAlias = OptExcInfo # noqa: Y047 # TODO: obsolete, remove fall 2022 or later
|
||||
|
||||
# ----- sys variables -----
|
||||
if sys.platform != "win32":
|
||||
abiflags: str
|
||||
argv: list[str]
|
||||
# base_exec_prefix: str
|
||||
# base_prefix: str
|
||||
byteorder: Literal["little", "big"]
|
||||
# builtin_module_names: Sequence[str] # actually a tuple of strings
|
||||
# copyright: str
|
||||
if sys.platform == "win32":
|
||||
dllhandle: int
|
||||
# dont_write_bytecode: bool
|
||||
# displayhook: Callable[[object], Any]
|
||||
# excepthook: Callable[[type[BaseException], BaseException, TracebackType | None], Any]
|
||||
# exec_prefix: str
|
||||
# executable: str
|
||||
# float_repr_style: Literal["short", "legacy"]
|
||||
# hexversion: int
|
||||
# last_type: type[BaseException] | None
|
||||
# last_value: BaseException | None
|
||||
# last_traceback: TracebackType | None
|
||||
if sys.version_info >= (3, 12):
|
||||
last_exc: BaseException # or undefined.
|
||||
maxsize: int
|
||||
# maxunicode: int
|
||||
# meta_path: list[MetaPathFinderProtocol]
|
||||
modules: dict[str, ModuleType]
|
||||
if sys.version_info >= (3, 10):
|
||||
orig_argv: list[str]
|
||||
path: list[str]
|
||||
# path_hooks: list[Callable[[str], PathEntryFinderProtocol]]
|
||||
# path_importer_cache: dict[str, PathEntryFinderProtocol | None]
|
||||
platform: str
|
||||
if sys.version_info >= (3, 9):
|
||||
platlibdir: str
|
||||
# prefix: str
|
||||
# pycache_prefix: str | None
|
||||
ps1: object
|
||||
ps2: object
|
||||
|
||||
# TextIO is used instead of more specific types for the standard streams,
|
||||
# since they are often monkeypatched at runtime. At startup, the objects
|
||||
# are initialized to instances of TextIOWrapper, but can also be None under
|
||||
# some circumstances.
|
||||
#
|
||||
# To use methods from TextIOWrapper, use an isinstance check to ensure that
|
||||
# the streams have not been overridden:
|
||||
#
|
||||
# if isinstance(sys.stdout, io.TextIOWrapper):
|
||||
# sys.stdout.reconfigure(...)
|
||||
stdin: TextIO | MaybeNone
|
||||
stdout: TextIO | MaybeNone
|
||||
stderr: TextIO | MaybeNone
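# Illustrative sketch of the isinstance guard described in the comment above;
# ``reconfigure`` is a TextIOWrapper method and is assumed to be available on
# the platform's stdout implementation.
def _example_reconfigure_stdout() -> None:
    if isinstance(stdout, TextIOWrapper):
        stdout.reconfigure(line_buffering=True)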
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
stdlib_module_names: frozenset[str]
|
||||
|
||||
__stdin__: Final[TextIOWrapper | None] # Contains the original value of stdin
|
||||
__stdout__: Final[TextIOWrapper | None] # Contains the original value of stdout
|
||||
__stderr__: Final[TextIOWrapper | None] # Contains the original value of stderr
|
||||
tracebacklimit: int
|
||||
version: str
|
||||
# api_version: int
|
||||
# warnoptions: Any
|
||||
# Each entry is a tuple of the form (action, message, category, module,
|
||||
# lineno)
|
||||
if sys.platform == "win32":
|
||||
winver: str
|
||||
_xoptions: dict[Any, Any]
|
||||
|
||||
# Type alias used as a mixin for structseq classes that cannot be instantiated at runtime
|
||||
# This can't be represented in the type system, so we just use `structseq[Any]`
|
||||
_UninstantiableStructseq: TypeAlias = structseq[Any]
|
||||
|
||||
# flags: _flags
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
_FlagTuple: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int, int]
|
||||
else:
|
||||
_FlagTuple: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int]
|
||||
|
||||
@final
|
||||
class _flags(_UninstantiableStructseq, _FlagTuple):
|
||||
@property
|
||||
def debug(self) -> int: ...
|
||||
@property
|
||||
def inspect(self) -> int: ...
|
||||
@property
|
||||
def interactive(self) -> int: ...
|
||||
@property
|
||||
def optimize(self) -> int: ...
|
||||
@property
|
||||
def dont_write_bytecode(self) -> int: ...
|
||||
@property
|
||||
def no_user_site(self) -> int: ...
|
||||
@property
|
||||
def no_site(self) -> int: ...
|
||||
@property
|
||||
def ignore_environment(self) -> int: ...
|
||||
@property
|
||||
def verbose(self) -> int: ...
|
||||
@property
|
||||
def bytes_warning(self) -> int: ...
|
||||
@property
|
||||
def quiet(self) -> int: ...
|
||||
@property
|
||||
def hash_randomization(self) -> int: ...
|
||||
@property
|
||||
def isolated(self) -> int: ...
|
||||
@property
|
||||
def dev_mode(self) -> bool: ...
|
||||
@property
|
||||
def utf8_mode(self) -> int: ...
|
||||
if sys.version_info >= (3, 10):
|
||||
@property
|
||||
def warn_default_encoding(self) -> int: ... # undocumented
|
||||
if sys.version_info >= (3, 11):
|
||||
@property
|
||||
def safe_path(self) -> bool: ...
|
||||
|
||||
# float_info: _float_info
|
||||
|
||||
@final
|
||||
class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]):
|
||||
@property
|
||||
def max(self) -> float: ... # DBL_MAX
|
||||
@property
|
||||
def max_exp(self) -> int: ... # DBL_MAX_EXP
|
||||
@property
|
||||
def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP
|
||||
@property
|
||||
def min(self) -> float: ... # DBL_MIN
|
||||
@property
|
||||
def min_exp(self) -> int: ... # DBL_MIN_EXP
|
||||
@property
|
||||
def min_10_exp(self) -> int: ... # DBL_MIN_10_EXP
|
||||
@property
|
||||
def dig(self) -> int: ... # DBL_DIG
|
||||
@property
|
||||
def mant_dig(self) -> int: ... # DBL_MANT_DIG
|
||||
@property
|
||||
def epsilon(self) -> float: ... # DBL_EPSILON
|
||||
@property
|
||||
def radix(self) -> int: ... # FLT_RADIX
|
||||
@property
|
||||
def rounds(self) -> int: ... # FLT_ROUNDS
|
||||
|
||||
# hash_info: _hash_info
|
||||
|
||||
@final
|
||||
class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]):
|
||||
@property
|
||||
def width(self) -> int: ...
|
||||
@property
|
||||
def modulus(self) -> int: ...
|
||||
@property
|
||||
def inf(self) -> int: ...
|
||||
@property
|
||||
def nan(self) -> int: ...
|
||||
@property
|
||||
def imag(self) -> int: ...
|
||||
@property
|
||||
def algorithm(self) -> str: ...
|
||||
@property
|
||||
def hash_bits(self) -> int: ...
|
||||
@property
|
||||
def seed_bits(self) -> int: ...
|
||||
@property
|
||||
def cutoff(self) -> int: ... # undocumented
|
||||
|
||||
implementation: _mp_implementation
|
||||
|
||||
class _implementation:
|
||||
name: str
|
||||
version: _version_info
|
||||
hexversion: int
|
||||
cache_tag: str
|
||||
# Define __getattr__, as the documentation states:
|
||||
# > sys.implementation may contain additional attributes specific to the Python implementation.
|
||||
# > These non-standard attributes must start with an underscore, and are not described here.
|
||||
def __getattr__(self, name: str) -> Any: ...
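# Illustrative sketch (an assumption, not part of the stub): branching on the
# runtime implementation, assuming the object exposes the standard ``name``
# attribute described above.
def _example_running_on_micropython() -> bool:
    return implementation.name == "micropython"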
|
||||
|
||||
# int_info: _int_info
|
||||
|
||||
@final
|
||||
class _int_info(structseq[int], tuple[int, int, int, int]):
|
||||
@property
|
||||
def bits_per_digit(self) -> int: ...
|
||||
@property
|
||||
def sizeof_digit(self) -> int: ...
|
||||
@property
|
||||
def default_max_str_digits(self) -> int: ...
|
||||
@property
|
||||
def str_digits_check_threshold(self) -> int: ...
|
||||
|
||||
_ThreadInfoName: TypeAlias = Literal["nt", "pthread", "pthread-stubs", "solaris"]
|
||||
_ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None
|
||||
|
||||
@final
|
||||
class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoLock, str | None]):
|
||||
@property
|
||||
def name(self) -> _ThreadInfoName: ...
|
||||
@property
|
||||
def lock(self) -> _ThreadInfoLock: ...
|
||||
@property
|
||||
def version(self) -> str | None: ...
|
||||
|
||||
# thread_info: _thread_info
|
||||
_ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"]
|
||||
|
||||
@final
|
||||
class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]):
|
||||
@property
|
||||
def major(self) -> int: ...
|
||||
@property
|
||||
def minor(self) -> int: ...
|
||||
@property
|
||||
def micro(self) -> int: ...
|
||||
@property
|
||||
def releaselevel(self) -> _ReleaseLevel: ...
|
||||
@property
|
||||
def serial(self) -> int: ...
|
||||
|
||||
version_info: _version_info
|
||||
|
||||
def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: ...
|
||||
def _clear_type_cache() -> None: ...
|
||||
def _current_frames() -> dict[int, FrameType]: ...
|
||||
def _getframe(depth: int = 0, /) -> FrameType: ...
|
||||
def _debugmallocstats() -> None: ...
|
||||
def __displayhook__(object: object, /) -> None: ...
|
||||
def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: ...
|
||||
def exc_info() -> OptExcInfo: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def exception() -> BaseException | None: ...
|
||||
|
||||
def exit(retval: object = 0, /) -> NoReturn:
    """
    Terminate current program with a given exit code. Underlyingly, this
    function raises a `SystemExit` exception. If an argument is given, its
    value is given as an argument to `SystemExit`.

    On embedded ports (i.e. all ports but Windows and Unix), an unhandled
    `SystemExit` currently causes a :ref:`soft_reset` of MicroPython.
    """
    ...
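# Illustrative sketch (not part of the stub): because exit() works by raising
# SystemExit, a caller can still intercept the request before the soft reset or
# interpreter shutdown happens.
def _example_intercept_exit() -> None:
    try:
        exit(1)
    except SystemExit as e:
        assert e.args == (1,)  # the value passed to exit() travels with the exception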
def getallocatedblocks() -> int: ...
|
||||
def getdefaultencoding() -> str: ...
|
||||
|
||||
if sys.platform != "win32":
|
||||
def getdlopenflags() -> int: ...
|
||||
|
||||
def getfilesystemencoding() -> str: ...
|
||||
def getfilesystemencodeerrors() -> str: ...
|
||||
def getrefcount(object: Any, /) -> int: ...
|
||||
def getrecursionlimit() -> int: ...
|
||||
def getsizeof(obj: object, default: int = ...) -> int: ...
|
||||
def getswitchinterval() -> float: ...
|
||||
def getprofile() -> ProfileFunction | None: ...
|
||||
def setprofile(function: ProfileFunction | None, /) -> None: ...
|
||||
def gettrace() -> TraceFunction | None: ...
|
||||
def settrace(tracefunc) -> None:
    """
    Enable tracing of bytecode execution. For details see the `CPython
    documentation <https://docs.python.org/3/library/sys.html#sys.settrace>`_.

    This function requires a custom MicroPython build as it is typically not
    present in pre-built firmware (due to it affecting performance). The relevant
    configuration option is *MICROPY_PY_SYS_SETTRACE*.
    """
    ...
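# Illustrative sketch (not part of the stub): a minimal trace callback, assuming
# a build with MICROPY_PY_SYS_SETTRACE (or CPython, where sys.settrace always
# exists). Passing None afterwards disables tracing again.
def _example_trace_calls() -> None:
    def _trace(frame, event, arg):
        if event == "call":
            print("call:", frame.f_code.co_name)
        return _trace  # keep tracing inside the called frame
    settrace(_trace)
    # traced work would go here
    settrace(None)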
if sys.platform == "win32":
|
||||
# A tuple of length 5, even though it has more than 5 attributes.
|
||||
@final
|
||||
class _WinVersion(_UninstantiableStructseq, tuple[int, int, int, int, str]):
|
||||
@property
|
||||
def major(self) -> int: ...
|
||||
@property
|
||||
def minor(self) -> int: ...
|
||||
@property
|
||||
def build(self) -> int: ...
|
||||
@property
|
||||
def platform(self) -> int: ...
|
||||
@property
|
||||
def service_pack(self) -> str: ...
|
||||
@property
|
||||
def service_pack_minor(self) -> int: ...
|
||||
@property
|
||||
def service_pack_major(self) -> int: ...
|
||||
@property
|
||||
def suite_mask(self) -> int: ...
|
||||
@property
|
||||
def product_type(self) -> int: ...
|
||||
@property
|
||||
def platform_version(self) -> tuple[int, int, int]: ...
|
||||
|
||||
def getwindowsversion() -> _WinVersion: ...
|
||||
|
||||
def intern(string: str, /) -> str: ...
|
||||
def is_finalizing() -> bool: ...
|
||||
def breakpointhook(*args: Any, **kwargs: Any) -> Any: ...
|
||||
|
||||
__breakpointhook__ = breakpointhook # Contains the original value of breakpointhook
|
||||
|
||||
if sys.platform != "win32":
|
||||
def setdlopenflags(flags: int, /) -> None: ...
|
||||
|
||||
def setrecursionlimit(limit: int, /) -> None: ...
|
||||
def setswitchinterval(interval: float, /) -> None: ...
|
||||
def gettotalrefcount() -> int: ... # Debug builds only
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
def getcheckinterval() -> int: ... # deprecated
|
||||
def setcheckinterval(n: int, /) -> None: ... # deprecated
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
# An 11-tuple or None
|
||||
def callstats() -> tuple[int, int, int, int, int, int, int, int, int, int, int] | None: ...
|
||||
|
||||
# Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily.
|
||||
class UnraisableHookArgs(Protocol):
|
||||
exc_type: type[BaseException]
|
||||
exc_value: BaseException | None
|
||||
exc_traceback: TracebackType | None
|
||||
err_msg: str | None
|
||||
object: _object
|
||||
|
||||
# unraisablehook: Callable[[UnraisableHookArgs], Any]
|
||||
|
||||
def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: ...
|
||||
def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ...
|
||||
def audit(event: str, /, *args: Any) -> None: ...
|
||||
|
||||
_AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None
|
||||
|
||||
@final
|
||||
class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHook]):
|
||||
@property
|
||||
def firstiter(self) -> _AsyncgenHook: ...
|
||||
@property
|
||||
def finalizer(self) -> _AsyncgenHook: ...
|
||||
|
||||
def get_asyncgen_hooks() -> _asyncgen_hooks: ...
|
||||
def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ...
|
||||
|
||||
if sys.platform == "win32":
|
||||
def _enablelegacywindowsfsencoding() -> None: ...
|
||||
|
||||
def get_coroutine_origin_tracking_depth() -> int: ...
|
||||
def set_coroutine_origin_tracking_depth(depth: int) -> None: ...
|
||||
|
||||
# The following two functions were added in 3.11.0, 3.10.7, 3.9.14, and 3.8.14,
|
||||
# as part of the response to CVE-2020-10735
|
||||
def set_int_max_str_digits(maxdigits: int) -> None: ...
|
||||
def get_int_max_str_digits() -> int: ...
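# Illustrative sketch (not part of the stub): tightening the int<->str conversion
# limit introduced with the CVE-2020-10735 fix. 640 is CPython's documented
# minimum; whether a given MicroPython build enforces the limit is an assumption.
def _example_int_str_limit() -> None:
    set_int_max_str_digits(640)
    try:
        int("9" * (get_int_max_str_digits() + 1))
    except ValueError:
        pass  # conversions above the limit are rejected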
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
if sys.version_info >= (3, 13):
|
||||
def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: ...
|
||||
else:
|
||||
def getunicodeinternedsize() -> int: ...
|
||||
|
||||
def deactivate_stack_trampoline() -> None: ...
|
||||
def is_stack_trampoline_active() -> bool: ...
|
||||
# It always exists, but raises on non-linux platforms:
|
||||
if sys.platform == "linux":
|
||||
def activate_stack_trampoline(backend: str, /) -> None: ...
|
||||
else:
|
||||
def activate_stack_trampoline(backend: str, /) -> NoReturn: ...
|
||||
|
||||
from . import _monitoring
|
||||
|
||||
monitoring = _monitoring
|
||||
|
||||
def __mpy_has_no_atexit(func: Callable[[], None] | None, /) -> Callable[[], None] | None:
    """
    Register *func* to be called upon termination. *func* must be a callable
    that takes no arguments, or ``None`` to disable the call. The ``atexit``
    function will return the previous value set by this function, which is
    initially ``None``.

    .. admonition:: Difference to CPython
       :class: attention

       This function is a MicroPython extension intended to provide similar
       functionality to the :mod:`atexit` module in CPython.
    """
    ...
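# Illustrative sketch (not part of the stub): the MicroPython runtime exposes the
# handler registration described above as ``sys.atexit`` on ports that enable the
# extension; the stub name used here is a placeholder for it.
def _example_register_exit_handler() -> None:
    def _on_exit() -> None:
        print("shutting down")
    previous = __mpy_has_no_atexit(_on_exit)
    assert previous is None  # nothing was registered before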
def print_exception(exc: Exception | BaseException, file: IOBase_mp = stdout, /) -> None:
    """
    Print exception with a traceback to a file-like object *file* (or
    `sys.stdout` by default).

    .. admonition:: Difference to CPython
       :class: attention

       This is a simplified version of a function which appears in the
       ``traceback`` module in CPython. Unlike ``traceback.print_exception()``,
       this function takes just the exception value instead of the exception
       type, exception value, and traceback object; the *file* argument must be
       positional; further arguments are not supported. A CPython-compatible
       ``traceback`` module can be found in `micropython-lib`.
    """
    ...
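# Illustrative sketch (not part of the stub): printing a caught exception's
# traceback to stdout, mirroring what traceback.print_exc() would do in CPython.
def _example_report_error() -> None:
    try:
        1 / 0
    except Exception as exc:
        print_exception(exc)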
641
.venv/lib/python3.12/site-packages/stdlib/types.pyi
Normal file
@@ -0,0 +1,641 @@
|
||||
import sys
|
||||
from _typeshed import MaybeNone, SupportsKeysAndGetItem
|
||||
from _typeshed.importlib import LoaderProtocol
|
||||
from collections.abc import (
|
||||
AsyncGenerator,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Coroutine,
|
||||
Generator,
|
||||
ItemsView,
|
||||
Iterable,
|
||||
Iterator,
|
||||
KeysView,
|
||||
MutableSequence,
|
||||
ValuesView,
|
||||
)
|
||||
from importlib.machinery import ModuleSpec
|
||||
|
||||
# pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping
|
||||
from typing import Any, ClassVar, Literal, Mapping, TypeVar, final, overload # noqa: Y022
|
||||
from typing_extensions import ParamSpec, Self, TypeVarTuple, deprecated
|
||||
|
||||
__all__ = [
|
||||
"FunctionType",
|
||||
"LambdaType",
|
||||
"CodeType",
|
||||
"MappingProxyType",
|
||||
"SimpleNamespace",
|
||||
"GeneratorType",
|
||||
"CoroutineType",
|
||||
"AsyncGeneratorType",
|
||||
"MethodType",
|
||||
"BuiltinFunctionType",
|
||||
"ModuleType",
|
||||
"TracebackType",
|
||||
"FrameType",
|
||||
"GetSetDescriptorType",
|
||||
"MemberDescriptorType",
|
||||
"new_class",
|
||||
"prepare_class",
|
||||
"DynamicClassAttribute",
|
||||
"coroutine",
|
||||
"BuiltinMethodType",
|
||||
"ClassMethodDescriptorType",
|
||||
"MethodDescriptorType",
|
||||
"MethodWrapperType",
|
||||
"WrapperDescriptorType",
|
||||
"resolve_bases",
|
||||
"CellType",
|
||||
]
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
__all__ += ["GenericAlias"]
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
__all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"]
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
__all__ += ["get_original_bases"]
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
__all__ += ["CapsuleType"]
|
||||
|
||||
# Note, all classes "defined" here require special handling.
|
||||
|
||||
_T1 = TypeVar("_T1")
|
||||
_T2 = TypeVar("_T2")
|
||||
_KT = TypeVar("_KT")
|
||||
_VT_co = TypeVar("_VT_co", covariant=True)
|
||||
|
||||
# Make sure this class definition stays roughly in line with `builtins.function`
|
||||
@final
|
||||
class FunctionType:
|
||||
@property
|
||||
def __closure__(self) -> tuple[CellType, ...] | None: ...
|
||||
__code__: CodeType
|
||||
__defaults__: tuple[Any, ...] | None
|
||||
__dict__: dict[str, Any]
|
||||
@property
|
||||
def __globals__(self) -> dict[str, Any]: ...
|
||||
__name__: str
|
||||
__qualname__: str
|
||||
__annotations__: dict[str, Any]
|
||||
__kwdefaults__: dict[str, Any] | None
|
||||
if sys.version_info >= (3, 10):
|
||||
@property
|
||||
def __builtins__(self) -> dict[str, Any]: ...
|
||||
if sys.version_info >= (3, 12):
|
||||
__type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...]
|
||||
|
||||
__module__: str
|
||||
def __new__(
|
||||
cls,
|
||||
code: CodeType,
|
||||
globals: dict[str, Any],
|
||||
name: str | None = ...,
|
||||
argdefs: tuple[object, ...] | None = ...,
|
||||
closure: tuple[CellType, ...] | None = ...,
|
||||
) -> Self: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
@overload
|
||||
def __get__(self, instance: None, owner: type, /) -> FunctionType: ...
|
||||
@overload
|
||||
def __get__(self, instance: object, owner: type | None = None, /) -> MethodType: ...
|
||||
|
||||
LambdaType = FunctionType
|
||||
|
||||
@final
|
||||
class CodeType:
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
@property
|
||||
def co_argcount(self) -> int: ...
|
||||
@property
|
||||
def co_posonlyargcount(self) -> int: ...
|
||||
@property
|
||||
def co_kwonlyargcount(self) -> int: ...
|
||||
@property
|
||||
def co_nlocals(self) -> int: ...
|
||||
@property
|
||||
def co_stacksize(self) -> int: ...
|
||||
@property
|
||||
def co_flags(self) -> int: ...
|
||||
@property
|
||||
def co_code(self) -> bytes: ...
|
||||
@property
|
||||
def co_consts(self) -> tuple[Any, ...]: ...
|
||||
@property
|
||||
def co_names(self) -> tuple[str, ...]: ...
|
||||
@property
|
||||
def co_varnames(self) -> tuple[str, ...]: ...
|
||||
@property
|
||||
def co_filename(self) -> str: ...
|
||||
@property
|
||||
def co_name(self) -> str: ...
|
||||
@property
|
||||
def co_firstlineno(self) -> int: ...
|
||||
if sys.version_info >= (3, 10):
|
||||
@property
|
||||
@deprecated("Will be removed in Python 3.14. Use the co_lines() method instead.")
|
||||
def co_lnotab(self) -> bytes: ...
|
||||
else:
|
||||
@property
|
||||
def co_lnotab(self) -> bytes: ...
|
||||
|
||||
@property
|
||||
def co_freevars(self) -> tuple[str, ...]: ...
|
||||
@property
|
||||
def co_cellvars(self) -> tuple[str, ...]: ...
|
||||
if sys.version_info >= (3, 10):
|
||||
@property
|
||||
def co_linetable(self) -> bytes: ...
|
||||
def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
@property
|
||||
def co_exceptiontable(self) -> bytes: ...
|
||||
@property
|
||||
def co_qualname(self) -> str: ...
|
||||
def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ...
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def __new__(
|
||||
cls,
|
||||
argcount: int,
|
||||
posonlyargcount: int,
|
||||
kwonlyargcount: int,
|
||||
nlocals: int,
|
||||
stacksize: int,
|
||||
flags: int,
|
||||
codestring: bytes,
|
||||
constants: tuple[object, ...],
|
||||
names: tuple[str, ...],
|
||||
varnames: tuple[str, ...],
|
||||
filename: str,
|
||||
name: str,
|
||||
qualname: str,
|
||||
firstlineno: int,
|
||||
linetable: bytes,
|
||||
exceptiontable: bytes,
|
||||
freevars: tuple[str, ...] = ...,
|
||||
cellvars: tuple[str, ...] = ...,
|
||||
/,
|
||||
) -> Self: ...
|
||||
elif sys.version_info >= (3, 10):
|
||||
def __new__(
|
||||
cls,
|
||||
argcount: int,
|
||||
posonlyargcount: int,
|
||||
kwonlyargcount: int,
|
||||
nlocals: int,
|
||||
stacksize: int,
|
||||
flags: int,
|
||||
codestring: bytes,
|
||||
constants: tuple[object, ...],
|
||||
names: tuple[str, ...],
|
||||
varnames: tuple[str, ...],
|
||||
filename: str,
|
||||
name: str,
|
||||
firstlineno: int,
|
||||
linetable: bytes,
|
||||
freevars: tuple[str, ...] = ...,
|
||||
cellvars: tuple[str, ...] = ...,
|
||||
/,
|
||||
) -> Self: ...
|
||||
else:
|
||||
def __new__(
|
||||
cls,
|
||||
argcount: int,
|
||||
posonlyargcount: int,
|
||||
kwonlyargcount: int,
|
||||
nlocals: int,
|
||||
stacksize: int,
|
||||
flags: int,
|
||||
codestring: bytes,
|
||||
constants: tuple[object, ...],
|
||||
names: tuple[str, ...],
|
||||
varnames: tuple[str, ...],
|
||||
filename: str,
|
||||
name: str,
|
||||
firstlineno: int,
|
||||
lnotab: bytes,
|
||||
freevars: tuple[str, ...] = ...,
|
||||
cellvars: tuple[str, ...] = ...,
|
||||
/,
|
||||
) -> Self: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
def replace(
|
||||
self,
|
||||
*,
|
||||
co_argcount: int = -1,
|
||||
co_posonlyargcount: int = -1,
|
||||
co_kwonlyargcount: int = -1,
|
||||
co_nlocals: int = -1,
|
||||
co_stacksize: int = -1,
|
||||
co_flags: int = -1,
|
||||
co_firstlineno: int = -1,
|
||||
co_code: bytes = ...,
|
||||
co_consts: tuple[object, ...] = ...,
|
||||
co_names: tuple[str, ...] = ...,
|
||||
co_varnames: tuple[str, ...] = ...,
|
||||
co_freevars: tuple[str, ...] = ...,
|
||||
co_cellvars: tuple[str, ...] = ...,
|
||||
co_filename: str = ...,
|
||||
co_name: str = ...,
|
||||
co_qualname: str = ...,
|
||||
co_linetable: bytes = ...,
|
||||
co_exceptiontable: bytes = ...,
|
||||
) -> Self: ...
|
||||
elif sys.version_info >= (3, 10):
|
||||
def replace(
|
||||
self,
|
||||
*,
|
||||
co_argcount: int = -1,
|
||||
co_posonlyargcount: int = -1,
|
||||
co_kwonlyargcount: int = -1,
|
||||
co_nlocals: int = -1,
|
||||
co_stacksize: int = -1,
|
||||
co_flags: int = -1,
|
||||
co_firstlineno: int = -1,
|
||||
co_code: bytes = ...,
|
||||
co_consts: tuple[object, ...] = ...,
|
||||
co_names: tuple[str, ...] = ...,
|
||||
co_varnames: tuple[str, ...] = ...,
|
||||
co_freevars: tuple[str, ...] = ...,
|
||||
co_cellvars: tuple[str, ...] = ...,
|
||||
co_filename: str = ...,
|
||||
co_name: str = ...,
|
||||
co_linetable: bytes = ...,
|
||||
) -> Self: ...
|
||||
else:
|
||||
def replace(
|
||||
self,
|
||||
*,
|
||||
co_argcount: int = -1,
|
||||
co_posonlyargcount: int = -1,
|
||||
co_kwonlyargcount: int = -1,
|
||||
co_nlocals: int = -1,
|
||||
co_stacksize: int = -1,
|
||||
co_flags: int = -1,
|
||||
co_firstlineno: int = -1,
|
||||
co_code: bytes = ...,
|
||||
co_consts: tuple[object, ...] = ...,
|
||||
co_names: tuple[str, ...] = ...,
|
||||
co_varnames: tuple[str, ...] = ...,
|
||||
co_freevars: tuple[str, ...] = ...,
|
||||
co_cellvars: tuple[str, ...] = ...,
|
||||
co_filename: str = ...,
|
||||
co_name: str = ...,
|
||||
co_lnotab: bytes = ...,
|
||||
) -> Self: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
__replace__ = replace
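# Illustrative sketch (not part of the stub): deriving a modified code object via
# replace(); the function and the new name below are arbitrary examples.
def _example_code_replace() -> CodeType:
    def _f() -> int:
        return 1
    return _f.__code__.replace(co_name="_g")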
|
||||
|
||||
@final
|
||||
class MappingProxyType(Mapping[_KT, _VT_co]):
|
||||
__hash__: ClassVar[None] # type: ignore[assignment]
|
||||
def __new__(cls, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> Self: ...
|
||||
def __getitem__(self, key: _KT, /) -> _VT_co: ...
|
||||
def __iter__(self) -> Iterator[_KT]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def copy(self) -> dict[_KT, _VT_co]: ...
|
||||
def keys(self) -> KeysView[_KT]: ...
|
||||
def values(self) -> ValuesView[_VT_co]: ...
|
||||
def items(self) -> ItemsView[_KT, _VT_co]: ...
|
||||
@overload
|
||||
def get(self, key: _KT, /) -> _VT_co | None: ...
|
||||
@overload
|
||||
def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
def __reversed__(self) -> Iterator[_KT]: ...
|
||||
def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: ...
|
||||
def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: ...
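# Illustrative sketch (not part of the stub): a MappingProxyType is a read-only,
# live view of the underlying mapping.
def _example_mappingproxy() -> None:
    data = {"a": 1}
    view = MappingProxyType(data)
    assert view["a"] == 1
    data["b"] = 2  # changes to the underlying dict are visible through the proxy
    assert "b" in view
    # Assigning through the proxy itself (view["c"] = 3) raises TypeError.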
|
||||
|
||||
class SimpleNamespace:
|
||||
__hash__: ClassVar[None] # type: ignore[assignment]
|
||||
if sys.version_info >= (3, 13):
|
||||
def __init__(self, mapping_or_iterable: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), /, **kwargs: Any) -> None: ...
|
||||
else:
|
||||
def __init__(self, **kwargs: Any) -> None: ...
|
||||
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __getattribute__(self, name: str, /) -> Any: ...
|
||||
def __setattr__(self, name: str, value: Any, /) -> None: ...
|
||||
def __delattr__(self, name: str, /) -> None: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def __replace__(self, **kwargs: Any) -> Self: ...
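# Illustrative sketch (not part of the stub): attribute-style access over keyword
# arguments; the field names below are arbitrary examples.
def _example_simplenamespace() -> None:
    ns = SimpleNamespace(host="localhost", port=8080)
    ns.port = 9090
    assert (ns.host, ns.port) == ("localhost", 9090)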
|
||||
|
||||
class ModuleType:
|
||||
__name__: str
|
||||
__file__: str | None
|
||||
@property
|
||||
def __dict__(self) -> dict[str, Any]: ... # type: ignore[override]
|
||||
__loader__: LoaderProtocol | None
|
||||
__package__: str | None
|
||||
__path__: MutableSequence[str]
|
||||
__spec__: ModuleSpec | None
|
||||
# N.B. Although this is the same type as `builtins.object.__doc__`,
|
||||
# it is deliberately redeclared here. Most symbols declared in the namespace
|
||||
# of `types.ModuleType` are available as "implicit globals" within a module's
|
||||
# namespace, but this is not true for symbols declared in the namespace of `builtins.object`.
|
||||
# Redeclaring `__doc__` here helps some type checkers understand that `__doc__` is available
|
||||
# as an implicit global in all modules, similar to `__name__`, `__file__`, `__spec__`, etc.
|
||||
__doc__: str | None
|
||||
def __init__(self, name: str, doc: str | None = ...) -> None: ...
|
||||
# __getattr__ doesn't exist at runtime,
|
||||
# but having it here in typeshed makes dynamic imports
|
||||
# using `builtins.__import__` or `importlib.import_module` less painful
|
||||
def __getattr__(self, name: str) -> Any: ...
|
||||
|
||||
@final
|
||||
class CellType:
|
||||
def __new__(cls, contents: object = ..., /) -> Self: ...
|
||||
__hash__: ClassVar[None] # type: ignore[assignment]
|
||||
cell_contents: Any
|
||||
|
||||
_YieldT_co = TypeVar("_YieldT_co", covariant=True)
|
||||
_SendT_contra = TypeVar("_SendT_contra", contravariant=True)
|
||||
_ReturnT_co = TypeVar("_ReturnT_co", covariant=True)
|
||||
|
||||
@final
|
||||
class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]):
|
||||
@property
|
||||
def gi_yieldfrom(self) -> GeneratorType[_YieldT_co, _SendT_contra, Any] | None: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
@property
|
||||
def gi_suspended(self) -> bool: ...
|
||||
__name__: str
|
||||
__qualname__: str
|
||||
def __iter__(self) -> Self: ...
|
||||
def __next__(self) -> _YieldT_co: ...
|
||||
def send(self, arg: _SendT_contra, /) -> _YieldT_co: ...
|
||||
@overload
|
||||
def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /) -> _YieldT_co: ...
|
||||
@overload
|
||||
def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def __class_getitem__(cls, item: Any, /) -> Any: ...
|
||||
|
||||
@final
|
||||
class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]):
|
||||
@property
|
||||
def ag_await(self) -> Awaitable[Any] | None: ...
|
||||
__name__: str
|
||||
__qualname__: str
|
||||
if sys.version_info >= (3, 12):
|
||||
@property
|
||||
def ag_suspended(self) -> bool: ...
|
||||
|
||||
def __aiter__(self) -> Self: ...
|
||||
def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ...
|
||||
def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ...
|
||||
@overload
|
||||
async def athrow(
|
||||
self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /
|
||||
) -> _YieldT_co: ...
|
||||
@overload
|
||||
async def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ...
|
||||
def aclose(self) -> Coroutine[Any, Any, None]: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
|
||||
@final
|
||||
class CoroutineType(Coroutine[_YieldT_co, _SendT_contra, _ReturnT_co]):
|
||||
__name__: str
|
||||
__qualname__: str
|
||||
@property
|
||||
def cr_origin(self) -> tuple[tuple[str, int, str], ...] | None: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
@property
|
||||
def cr_suspended(self) -> bool: ...
|
||||
|
||||
def close(self) -> None: ...
|
||||
def __await__(self) -> Generator[Any, None, _ReturnT_co]: ...
|
||||
def send(self, arg: _SendT_contra, /) -> _YieldT_co: ...
|
||||
@overload
|
||||
def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /) -> _YieldT_co: ...
|
||||
@overload
|
||||
def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def __class_getitem__(cls, item: Any, /) -> Any: ...
|
||||
|
||||
@final
|
||||
class MethodType:
|
||||
@property
|
||||
def __closure__(self) -> tuple[CellType, ...] | None: ... # inherited from the added function
|
||||
@property
|
||||
def __code__(self) -> CodeType: ... # inherited from the added function
|
||||
@property
|
||||
def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function
|
||||
@property
|
||||
def __func__(self) -> Callable[..., Any]: ...
|
||||
@property
|
||||
def __self__(self) -> object: ...
|
||||
@property
|
||||
def __name__(self) -> str: ... # inherited from the added function
|
||||
@property
|
||||
def __qualname__(self) -> str: ... # inherited from the added function
|
||||
def __new__(cls, func: Callable[..., Any], obj: object, /) -> Self: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
|
||||
@final
|
||||
class BuiltinFunctionType:
|
||||
@property
|
||||
def __self__(self) -> object | ModuleType: ...
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
|
||||
BuiltinMethodType = BuiltinFunctionType
|
||||
|
||||
@final
|
||||
class WrapperDescriptorType:
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
@property
|
||||
def __objclass__(self) -> type: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ...
|
||||
|
||||
@final
|
||||
class MethodWrapperType:
|
||||
@property
|
||||
def __self__(self) -> object: ...
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
@property
|
||||
def __objclass__(self) -> type: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __ne__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
|
||||
@final
|
||||
class MethodDescriptorType:
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
@property
|
||||
def __objclass__(self) -> type: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ...
|
||||
|
||||
@final
|
||||
class ClassMethodDescriptorType:
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
@property
|
||||
def __objclass__(self) -> type: ...
|
||||
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
||||
def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ...
|
||||
|
||||
@final
|
||||
class TracebackType:
|
||||
def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ...
|
||||
tb_next: TracebackType | None
|
||||
# the rest are read-only
|
||||
@property
|
||||
def tb_frame(self) -> FrameType: ...
|
||||
@property
|
||||
def tb_lasti(self) -> int: ...
|
||||
@property
|
||||
def tb_lineno(self) -> int: ...
|
||||
|
||||
@final
|
||||
class FrameType:
|
||||
@property
|
||||
def f_back(self) -> FrameType | None: ...
|
||||
@property
|
||||
def f_builtins(self) -> dict[str, Any]: ...
|
||||
@property
|
||||
def f_code(self) -> CodeType: ...
|
||||
@property
|
||||
def f_globals(self) -> dict[str, Any]: ...
|
||||
@property
|
||||
def f_lasti(self) -> int: ...
|
||||
# see discussion in #6769: f_lineno *can* sometimes be None,
|
||||
# but you should probably file a bug report with CPython if you encounter it being None in the wild.
|
||||
# An `int | None` annotation here causes too many false-positive errors, so applying `int | Any`.
|
||||
@property
|
||||
def f_lineno(self) -> int | MaybeNone: ...
|
||||
@property
|
||||
def f_locals(self) -> dict[str, Any]: ...
|
||||
f_trace: Callable[[FrameType, str, Any], Any] | None
|
||||
f_trace_lines: bool
|
||||
f_trace_opcodes: bool
|
||||
def clear(self) -> None: ...
|
||||
|
||||
@final
|
||||
class GetSetDescriptorType:
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
@property
|
||||
def __objclass__(self) -> type: ...
|
||||
def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ...
|
||||
def __set__(self, instance: Any, value: Any, /) -> None: ...
|
||||
def __delete__(self, instance: Any, /) -> None: ...
|
||||
|
||||
@final
|
||||
class MemberDescriptorType:
|
||||
@property
|
||||
def __name__(self) -> str: ...
|
||||
@property
|
||||
def __qualname__(self) -> str: ...
|
||||
@property
|
||||
def __objclass__(self) -> type: ...
|
||||
def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ...
|
||||
def __set__(self, instance: Any, value: Any, /) -> None: ...
|
||||
def __delete__(self, instance: Any, /) -> None: ...
|
||||
|
||||
def new_class(
|
||||
name: str,
|
||||
bases: Iterable[object] = (),
|
||||
kwds: dict[str, Any] | None = None,
|
||||
exec_body: Callable[[dict[str, Any]], object] | None = None,
|
||||
) -> type: ...
|
||||
def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ...
|
||||
def prepare_class(
|
||||
name: str, bases: tuple[type, ...] = (), kwds: dict[str, Any] | None = None
|
||||
) -> tuple[type, dict[str, Any], dict[str, Any]]: ...
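# Illustrative sketch (not part of the stub): new_class() builds a class
# programmatically, with exec_body filling the namespace the way a class body would.
def _example_new_class() -> type:
    def _fill(ns: dict[str, Any]) -> None:
        ns["x"] = 0
        ns["y"] = 0
    return new_class("Point", (), exec_body=_fill)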
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
def get_original_bases(cls: type, /) -> tuple[Any, ...]: ...
|
||||
|
||||
# Actually a different type, but `property` is special and we want that too.
|
||||
DynamicClassAttribute = property
|
||||
|
||||
_Fn = TypeVar("_Fn", bound=Callable[..., object])
|
||||
_R = TypeVar("_R")
|
||||
_P = ParamSpec("_P")
|
||||
|
||||
# it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable
|
||||
@overload
|
||||
def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ...
|
||||
@overload
|
||||
def coroutine(func: _Fn) -> _Fn: ...
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
class GenericAlias:
|
||||
@property
|
||||
def __origin__(self) -> type: ...
|
||||
@property
|
||||
def __args__(self) -> tuple[Any, ...]: ...
|
||||
@property
|
||||
def __parameters__(self) -> tuple[Any, ...]: ...
|
||||
def __new__(cls, origin: type, args: Any) -> Self: ...
|
||||
def __getitem__(self, typeargs: Any, /) -> GenericAlias: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
if sys.version_info >= (3, 11):
|
||||
@property
|
||||
def __unpacked__(self) -> bool: ...
|
||||
@property
|
||||
def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ...
|
||||
if sys.version_info >= (3, 10):
|
||||
def __or__(self, value: Any, /) -> UnionType: ...
|
||||
def __ror__(self, value: Any, /) -> UnionType: ...
|
||||
|
||||
# GenericAlias delegates attr access to `__origin__`
|
||||
def __getattr__(self, name: str) -> Any: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
@final
|
||||
class NoneType:
|
||||
def __bool__(self) -> Literal[False]: ...
|
||||
|
||||
@final
|
||||
class EllipsisType: ...
|
||||
|
||||
from builtins import _NotImplementedType
|
||||
|
||||
NotImplementedType = _NotImplementedType
|
||||
@final
|
||||
class UnionType:
|
||||
@property
|
||||
def __args__(self) -> tuple[Any, ...]: ...
|
||||
def __or__(self, value: Any, /) -> UnionType: ...
|
||||
def __ror__(self, value: Any, /) -> UnionType: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
|
||||
if sys.version_info >= (3, 13):
|
||||
@final
|
||||
class CapsuleType: ...
|
||||
1075
.venv/lib/python3.12/site-packages/stdlib/typing.pyi
Normal file
File diff suppressed because it is too large
531
.venv/lib/python3.12/site-packages/stdlib/typing_extensions.pyi
Normal file
@@ -0,0 +1,531 @@
|
||||
import abc
|
||||
import sys
|
||||
import typing
|
||||
from _collections_abc import dict_items, dict_keys, dict_values
|
||||
from _typeshed import IdentityFunction
|
||||
from contextlib import AbstractAsyncContextManager as AsyncContextManager, AbstractContextManager as ContextManager
|
||||
from typing import ( # noqa: Y022,Y037,Y038,Y039
|
||||
IO as IO,
|
||||
TYPE_CHECKING as TYPE_CHECKING,
|
||||
AbstractSet as AbstractSet,
|
||||
Any as Any,
|
||||
AnyStr as AnyStr,
|
||||
AsyncGenerator as AsyncGenerator,
|
||||
AsyncIterable as AsyncIterable,
|
||||
AsyncIterator as AsyncIterator,
|
||||
Awaitable as Awaitable,
|
||||
BinaryIO as BinaryIO,
|
||||
Callable as Callable,
|
||||
ChainMap as ChainMap,
|
||||
ClassVar as ClassVar,
|
||||
Collection as Collection,
|
||||
Container as Container,
|
||||
Coroutine as Coroutine,
|
||||
Counter as Counter,
|
||||
DefaultDict as DefaultDict,
|
||||
Deque as Deque,
|
||||
Dict as Dict,
|
||||
ForwardRef as ForwardRef,
|
||||
FrozenSet as FrozenSet,
|
||||
Generator as Generator,
|
||||
Generic as Generic,
|
||||
Hashable as Hashable,
|
||||
ItemsView as ItemsView,
|
||||
Iterable as Iterable,
|
||||
Iterator as Iterator,
|
||||
KeysView as KeysView,
|
||||
List as List,
|
||||
Mapping as Mapping,
|
||||
MappingView as MappingView,
|
||||
Match as Match,
|
||||
MutableMapping as MutableMapping,
|
||||
MutableSequence as MutableSequence,
|
||||
MutableSet as MutableSet,
|
||||
NoReturn as NoReturn,
|
||||
Optional as Optional,
|
||||
Pattern as Pattern,
|
||||
Reversible as Reversible,
|
||||
Sequence as Sequence,
|
||||
Set as Set,
|
||||
Sized as Sized,
|
||||
SupportsAbs as SupportsAbs,
|
||||
SupportsBytes as SupportsBytes,
|
||||
SupportsComplex as SupportsComplex,
|
||||
SupportsFloat as SupportsFloat,
|
||||
SupportsInt as SupportsInt,
|
||||
SupportsRound as SupportsRound,
|
||||
Text as Text,
|
||||
TextIO as TextIO,
|
||||
Tuple as Tuple,
|
||||
Type as Type,
|
||||
Union as Union,
|
||||
ValuesView as ValuesView,
|
||||
_Alias,
|
||||
cast as cast,
|
||||
no_type_check as no_type_check,
|
||||
no_type_check_decorator as no_type_check_decorator,
|
||||
overload as overload,
|
||||
type_check_only,
|
||||
)
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from types import UnionType
|
||||
if sys.version_info >= (3, 9):
|
||||
from types import GenericAlias
|
||||
|
||||
__all__ = [
|
||||
"Any",
|
||||
"Buffer",
|
||||
"ClassVar",
|
||||
"Concatenate",
|
||||
"Final",
|
||||
"LiteralString",
|
||||
"ParamSpec",
|
||||
"ParamSpecArgs",
|
||||
"ParamSpecKwargs",
|
||||
"Self",
|
||||
"Type",
|
||||
"TypeVar",
|
||||
"TypeVarTuple",
|
||||
"Unpack",
|
||||
"Awaitable",
|
||||
"AsyncIterator",
|
||||
"AsyncIterable",
|
||||
"Coroutine",
|
||||
"AsyncGenerator",
|
||||
"AsyncContextManager",
|
||||
"CapsuleType",
|
||||
"ChainMap",
|
||||
"ContextManager",
|
||||
"Counter",
|
||||
"Deque",
|
||||
"DefaultDict",
|
||||
"NamedTuple",
|
||||
"OrderedDict",
|
||||
"TypedDict",
|
||||
"SupportsIndex",
|
||||
"SupportsAbs",
|
||||
"SupportsRound",
|
||||
"SupportsBytes",
|
||||
"SupportsComplex",
|
||||
"SupportsFloat",
|
||||
"SupportsInt",
|
||||
"Annotated",
|
||||
"assert_never",
|
||||
"assert_type",
|
||||
"dataclass_transform",
|
||||
"deprecated",
|
||||
"final",
|
||||
"IntVar",
|
||||
"is_typeddict",
|
||||
"Literal",
|
||||
"NewType",
|
||||
"overload",
|
||||
"override",
|
||||
"Protocol",
|
||||
"reveal_type",
|
||||
"runtime",
|
||||
"runtime_checkable",
|
||||
"Text",
|
||||
"TypeAlias",
|
||||
"TypeAliasType",
|
||||
"TypeGuard",
|
||||
"TYPE_CHECKING",
|
||||
"Never",
|
||||
"NoReturn",
|
||||
"Required",
|
||||
"NotRequired",
|
||||
"clear_overloads",
|
||||
"get_args",
|
||||
"get_origin",
|
||||
"get_original_bases",
|
||||
"get_overloads",
|
||||
"get_type_hints",
|
||||
"AbstractSet",
|
||||
"AnyStr",
|
||||
"BinaryIO",
|
||||
"Callable",
|
||||
"Collection",
|
||||
"Container",
|
||||
"Dict",
|
||||
"Doc",
|
||||
"ForwardRef",
|
||||
"FrozenSet",
|
||||
"Generator",
|
||||
"Generic",
|
||||
"Hashable",
|
||||
"IO",
|
||||
"ItemsView",
|
||||
"Iterable",
|
||||
"Iterator",
|
||||
"KeysView",
|
||||
"List",
|
||||
"Mapping",
|
||||
"MappingView",
|
||||
"Match",
|
||||
"MutableMapping",
|
||||
"MutableSequence",
|
||||
"MutableSet",
|
||||
"NoDefault",
|
||||
"Optional",
|
||||
"Pattern",
|
||||
"Reversible",
|
||||
"Sequence",
|
||||
"Set",
|
||||
"Sized",
|
||||
"TextIO",
|
||||
"Tuple",
|
||||
"Union",
|
||||
"ValuesView",
|
||||
"cast",
|
||||
"get_protocol_members",
|
||||
"is_protocol",
|
||||
"no_type_check",
|
||||
"no_type_check_decorator",
|
||||
"ReadOnly",
|
||||
"TypeIs",
|
||||
]
|
||||

_T = typing.TypeVar("_T")
_F = typing.TypeVar("_F", bound=Callable[..., Any])
_TC = typing.TypeVar("_TC", bound=type[object])

# unfortunately we have to duplicate this class definition from typing.pyi or we break pytype
class _SpecialForm:
    def __getitem__(self, parameters: Any) -> object: ...
    if sys.version_info >= (3, 10):
        def __or__(self, other: Any) -> _SpecialForm: ...
        def __ror__(self, other: Any) -> _SpecialForm: ...

# Do not import (and re-export) Protocol or runtime_checkable from
# typing module because type checkers need to be able to distinguish
# typing.Protocol and typing_extensions.Protocol so they can properly
# warn users about potential runtime exceptions when using typing.Protocol
# on older versions of Python.
Protocol: _SpecialForm

def runtime_checkable(cls: _TC) -> _TC: ...

# This alias for above is kept here for backwards compatibility.
runtime = runtime_checkable
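
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): pairing typing_extensions.Protocol with @runtime_checkable lets isinstance()
# perform a purely structural check against the declared method names. The names
# `SupportsClose` and `LogFile` below are hypothetical examples.
from typing_extensions import Protocol, runtime_checkable

@runtime_checkable
class SupportsClose(Protocol):
    def close(self) -> None: ...

class LogFile:
    def close(self) -> None:
        print("closed")

assert isinstance(LogFile(), SupportsClose)  # True: structural, name-based check
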
Final: _SpecialForm

def final(f: _F) -> _F: ...

Literal: _SpecialForm

def IntVar(name: str) -> Any: ...  # returns a new TypeVar

# Internal mypy fallback type for all typed dicts (does not exist at runtime)
# N.B. Keep this mostly in sync with typing._TypedDict/mypy_extensions._TypedDict
@type_check_only
class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
    __required_keys__: ClassVar[frozenset[str]]
    __optional_keys__: ClassVar[frozenset[str]]
    __total__: ClassVar[bool]
    __orig_bases__: ClassVar[tuple[Any, ...]]
    # PEP 705
    __readonly_keys__: ClassVar[frozenset[str]]
    __mutable_keys__: ClassVar[frozenset[str]]
    # PEP 728
    __closed__: ClassVar[bool]
    __extra_items__: ClassVar[Any]
    def copy(self) -> Self: ...
    # Using Never so that only calls using mypy plugin hook that specialize the signature
    # can go through.
    def setdefault(self, k: Never, default: object) -> object: ...
    # Mypy plugin hook for 'pop' expects that 'default' has a type variable type.
    def pop(self, k: Never, default: _T = ...) -> object: ...  # pyright: ignore[reportInvalidTypeVarUse]
    def update(self: _T, m: _T, /) -> None: ...
    def items(self) -> dict_items[str, object]: ...
    def keys(self) -> dict_keys[str, object]: ...
    def values(self) -> dict_values[str, object]: ...
    def __delitem__(self, k: Never) -> None: ...
    if sys.version_info >= (3, 9):
        @overload
        def __or__(self, value: Self, /) -> Self: ...
        @overload
        def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ...
        @overload
        def __ror__(self, value: Self, /) -> Self: ...
        @overload
        def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ...
        # supposedly incompatible definitions of `__ior__` and `__or__`:
        def __ior__(self, value: Self, /) -> Self: ...  # type: ignore[misc]

# TypedDict is a (non-subscriptable) special form.
TypedDict: object
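
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): TypedDict is the user-facing special form backed by the _TypedDict machinery
# above; required and optional keys are reflected in __required_keys__ and
# __optional_keys__. The name `Movie` is a hypothetical example.
from typing_extensions import NotRequired, TypedDict

class Movie(TypedDict):
    title: str              # required key
    year: NotRequired[int]  # optional key (PEP 655)

assert Movie.__required_keys__ == frozenset({"title"})
assert Movie.__optional_keys__ == frozenset({"year"})
movie: Movie = {"title": "Metropolis"}  # "year" may be omitted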

OrderedDict = _Alias()

def get_type_hints(
    obj: Callable[..., Any],
    globalns: dict[str, Any] | None = None,
    localns: Mapping[str, Any] | None = None,
    include_extras: bool = False,
) -> dict[str, Any]: ...
def get_args(tp: Any) -> tuple[Any, ...]: ...

if sys.version_info >= (3, 10):
    @overload
    def get_origin(tp: UnionType) -> type[UnionType]: ...

if sys.version_info >= (3, 9):
    @overload
    def get_origin(tp: GenericAlias) -> type: ...

@overload
def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ...
@overload
def get_origin(tp: Any) -> Any | None: ...
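
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): get_origin() and get_args() decompose a parameterized type into its origin
# and its type arguments.
from typing import Dict, Union

from typing_extensions import get_args, get_origin

assert get_origin(Dict[str, int]) is dict
assert get_args(Dict[str, int]) == (str, int)
assert get_origin(Union[int, str]) is Union
assert get_args(Union[int, str]) == (int, str)
assert get_origin(int) is None  # plain classes have no origin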

Annotated: _SpecialForm
_AnnotatedAlias: Any  # undocumented

@runtime_checkable
class SupportsIndex(Protocol, metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def __index__(self) -> int: ...

# New and changed things in 3.10
if sys.version_info >= (3, 10):
    from typing import (
        Concatenate as Concatenate,
        ParamSpecArgs as ParamSpecArgs,
        ParamSpecKwargs as ParamSpecKwargs,
        TypeAlias as TypeAlias,
        TypeGuard as TypeGuard,
        is_typeddict as is_typeddict,
    )
else:
    @final
    class ParamSpecArgs:
        @property
        def __origin__(self) -> ParamSpec: ...
        def __init__(self, origin: ParamSpec) -> None: ...

    @final
    class ParamSpecKwargs:
        @property
        def __origin__(self) -> ParamSpec: ...
        def __init__(self, origin: ParamSpec) -> None: ...

    Concatenate: _SpecialForm
    TypeAlias: _SpecialForm
    TypeGuard: _SpecialForm
    def is_typeddict(tp: object) -> bool: ...
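
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): a callable returning TypeGuard[X] tells type checkers that a True result
# narrows the checked argument to X. The names `is_str_list` and `upper_all` are
# hypothetical examples.
from typing import List

from typing_extensions import TypeGuard

def is_str_list(values: List[object]) -> TypeGuard[List[str]]:
    return all(isinstance(v, str) for v in values)

def upper_all(values: List[object]) -> List[str]:
    if is_str_list(values):
        return [v.upper() for v in values]  # narrowed to List[str] here
    return []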

# New and changed things in 3.11
if sys.version_info >= (3, 11):
    from typing import (
        LiteralString as LiteralString,
        NamedTuple as NamedTuple,
        Never as Never,
        NewType as NewType,
        NotRequired as NotRequired,
        Required as Required,
        Self as Self,
        Unpack as Unpack,
        assert_never as assert_never,
        assert_type as assert_type,
        clear_overloads as clear_overloads,
        dataclass_transform as dataclass_transform,
        get_overloads as get_overloads,
        reveal_type as reveal_type,
    )
else:
    Self: _SpecialForm
    Never: _SpecialForm
    def reveal_type(obj: _T, /) -> _T: ...
    def assert_never(arg: Never, /) -> Never: ...
    def assert_type(val: _T, typ: Any, /) -> _T: ...
    def clear_overloads() -> None: ...
    def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ...

    Required: _SpecialForm
    NotRequired: _SpecialForm
    LiteralString: _SpecialForm
    Unpack: _SpecialForm

    def dataclass_transform(
        *,
        eq_default: bool = True,
        order_default: bool = False,
        kw_only_default: bool = False,
        frozen_default: bool = False,
        field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (),
        **kwargs: object,
    ) -> IdentityFunction: ...

    class NamedTuple(tuple[Any, ...]):
        if sys.version_info < (3, 9):
            _field_types: ClassVar[dict[str, type]]
        _field_defaults: ClassVar[dict[str, Any]]
        _fields: ClassVar[tuple[str, ...]]
        __orig_bases__: ClassVar[tuple[Any, ...]]
        @overload
        def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ...
        @overload
        def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ...
        @classmethod
        def _make(cls, iterable: Iterable[Any]) -> Self: ...
        def _asdict(self) -> dict[str, Any]: ...
        def _replace(self, **kwargs: Any) -> Self: ...

    class NewType:
        def __init__(self, name: str, tp: Any) -> None: ...
        def __call__(self, obj: _T, /) -> _T: ...
        __supertype__: type | NewType
        if sys.version_info >= (3, 10):
            def __or__(self, other: Any) -> _SpecialForm: ...
            def __ror__(self, other: Any) -> _SpecialForm: ...
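
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): @dataclass_transform() marks a decorator (or base class/metaclass) as
# producing dataclass-like classes, so type checkers synthesize a matching __init__.
# The names `model` and `Point` are hypothetical, and the runtime behaviour below is
# a minimal stand-in rather than a full dataclass implementation.
from typing_extensions import dataclass_transform

@dataclass_transform()
def model(cls: type) -> type:
    # Minimal runtime stand-in: build __init__ from the class annotations.
    fields = list(getattr(cls, "__annotations__", {}))

    def __init__(self: object, **kwargs: object) -> None:
        for name in fields:
            setattr(self, name, kwargs[name])

    cls.__init__ = __init__  # type: ignore
    return cls

@model
class Point:
    x: int
    y: int

p = Point(x=1, y=2)  # type checkers synthesize an (x: int, y: int) __init__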

if sys.version_info >= (3, 12):
    from collections.abc import Buffer as Buffer
    from types import get_original_bases as get_original_bases
    from typing import TypeAliasType as TypeAliasType, override as override
else:
    def override(arg: _F, /) -> _F: ...
    def get_original_bases(cls: type, /) -> tuple[Any, ...]: ...
    @final
    class TypeAliasType:
        def __init__(self, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ()) -> None: ...
        @property
        def __value__(self) -> Any: ...
        @property
        def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ...
        @property
        def __parameters__(self) -> tuple[Any, ...]: ...
        @property
        def __name__(self) -> str: ...
        # It's writable on types, but not on instances of TypeAliasType.
        @property
        def __module__(self) -> str | None: ...  # type: ignore[override]
        # Returns typing._GenericAlias, which isn't stubbed.
        def __getitem__(self, parameters: Any) -> Any: ...
        if sys.version_info >= (3, 10):
            def __or__(self, right: Any) -> _SpecialForm: ...
            def __ror__(self, left: Any) -> _SpecialForm: ...

    @runtime_checkable
    class Buffer(Protocol):
        # Not actually a Protocol at runtime; see
        # https://github.com/python/typeshed/issues/10224 for why we're defining it this way
        def __buffer__(self, flags: int, /) -> memoryview: ...
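
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): Buffer (PEP 688) describes any object implementing the buffer protocol via
# __buffer__; bytes, bytearray and memoryview all qualify. The name `checksum` is a
# hypothetical example.
from typing_extensions import Buffer

def checksum(data: Buffer) -> int:
    # memoryview() accepts any object implementing the buffer protocol.
    return sum(memoryview(data).tobytes()) % 256

assert checksum(b"\x01\x02") == 3
assert checksum(bytearray(b"\xff\x01")) == 0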

if sys.version_info >= (3, 13):
    from types import CapsuleType as CapsuleType
    from typing import (
        NoDefault as NoDefault,
        ParamSpec as ParamSpec,
        ReadOnly as ReadOnly,
        TypeIs as TypeIs,
        TypeVar as TypeVar,
        TypeVarTuple as TypeVarTuple,
        get_protocol_members as get_protocol_members,
        is_protocol as is_protocol,
    )
    from warnings import deprecated as deprecated
else:
    def is_protocol(tp: type, /) -> bool: ...
    def get_protocol_members(tp: type, /) -> frozenset[str]: ...
    @final
    class _NoDefaultType: ...

    NoDefault: _NoDefaultType
    @final
    class CapsuleType: ...

    class deprecated:
        message: LiteralString
        category: type[Warning] | None
        stacklevel: int
        def __init__(self, message: LiteralString, /, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> None: ...
        def __call__(self, arg: _T, /) -> _T: ...

    @final
    class TypeVar:
        @property
        def __name__(self) -> str: ...
        @property
        def __bound__(self) -> Any | None: ...
        @property
        def __constraints__(self) -> tuple[Any, ...]: ...
        @property
        def __covariant__(self) -> bool: ...
        @property
        def __contravariant__(self) -> bool: ...
        @property
        def __infer_variance__(self) -> bool: ...
        @property
        def __default__(self) -> Any: ...
        def __init__(
            self,
            name: str,
            *constraints: Any,
            bound: Any | None = None,
            covariant: bool = False,
            contravariant: bool = False,
            default: Any = ...,
            infer_variance: bool = False,
        ) -> None: ...
        def has_default(self) -> bool: ...
        def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ...
        if sys.version_info >= (3, 10):
            def __or__(self, right: Any) -> _SpecialForm: ...
            def __ror__(self, left: Any) -> _SpecialForm: ...
        if sys.version_info >= (3, 11):
            def __typing_subst__(self, arg: Any) -> Any: ...

    @final
    class ParamSpec:
        @property
        def __name__(self) -> str: ...
        @property
        def __bound__(self) -> Any | None: ...
        @property
        def __covariant__(self) -> bool: ...
        @property
        def __contravariant__(self) -> bool: ...
        @property
        def __infer_variance__(self) -> bool: ...
        @property
        def __default__(self) -> Any: ...
        def __init__(
            self,
            name: str,
            *,
            bound: None | type[Any] | str = None,
            contravariant: bool = False,
            covariant: bool = False,
            default: Any = ...,
        ) -> None: ...
        @property
        def args(self) -> ParamSpecArgs: ...
        @property
        def kwargs(self) -> ParamSpecKwargs: ...
        def has_default(self) -> bool: ...
        def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ...
        if sys.version_info >= (3, 10):
            def __or__(self, right: Any) -> _SpecialForm: ...
            def __ror__(self, left: Any) -> _SpecialForm: ...

    @final
    class TypeVarTuple:
        @property
        def __name__(self) -> str: ...
        @property
        def __default__(self) -> Any: ...
        def __init__(self, name: str, *, default: Any = ...) -> None: ...
        def __iter__(self) -> Any: ...  # Unpack[Self]
        def has_default(self) -> bool: ...
        def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ...

    ReadOnly: _SpecialForm
    TypeIs: _SpecialForm
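
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): TypeIs (PEP 742) behaves like TypeGuard but also narrows in the False
# branch, intersecting with the argument's declared type. The names `is_str` and
# `describe` are hypothetical examples.
from typing import Union

from typing_extensions import TypeIs

def is_str(value: Union[str, int]) -> TypeIs[str]:
    return isinstance(value, str)

def describe(value: Union[str, int]) -> str:
    if is_str(value):
        return value.upper()     # narrowed to str
    return format(value, "04d")  # narrowed to int in the else branch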

class Doc:
    documentation: str
    def __init__(self, documentation: str, /) -> None: ...
    def __hash__(self) -> int: ...
    def __eq__(self, other: object) -> bool: ...
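
# NOTE: Illustrative usage sketch (an editorial addition, not part of the vendored
# stub): Doc (proposed in PEP 727) attaches human-readable documentation to a type
# through Annotated metadata, where tools can retrieve it via get_args(). The name
# `create_user` is a hypothetical example.
from typing_extensions import Annotated, Doc, get_args

def create_user(name: Annotated[str, Doc("The user's display name.")]) -> None: ...

doc = get_args(create_user.__annotations__["name"])[1]
assert doc.documentation == "The user's display name."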