BIG UPDATE
Updated Python to 3.10; updated diffusers to 0.4.1.
Parent: 21c9ae8a8d
Commit: 238bf88b72
Binary files added (new files; contents not shown):
  Python310/DLLs/_asyncio.pyd
  Python310/DLLs/_asyncio_d.pyd
  Python310/DLLs/_bz2.pyd
  Python310/DLLs/_bz2_d.pyd
  Python310/DLLs/_ctypes.pyd
  Python310/DLLs/_ctypes_d.pyd
  Python310/DLLs/_ctypes_test.pyd
  Python310/DLLs/_ctypes_test_d.pyd
  Python310/DLLs/_decimal.pyd
  Python310/DLLs/_decimal_d.pyd
  Python310/DLLs/_elementtree.pyd
  Python310/DLLs/_elementtree_d.pyd
  Python310/DLLs/_hashlib.pyd
  Python310/DLLs/_hashlib_d.pyd
  Python310/DLLs/_lzma.pyd
  Python310/DLLs/_lzma_d.pyd
  Python310/DLLs/_msi.pyd
  Python310/DLLs/_msi_d.pyd
  Python310/DLLs/_multiprocessing.pyd
  Python310/DLLs/_multiprocessing_d.pyd
  Python310/DLLs/_overlapped.pyd
  Python310/DLLs/_overlapped_d.pyd
  Python310/DLLs/_queue.pyd
  Python310/DLLs/_queue_d.pyd
  Python310/DLLs/_socket.pyd
  Python310/DLLs/_socket_d.pyd
  Python310/DLLs/_sqlite3.pyd
  Python310/DLLs/_sqlite3_d.pyd
  Python310/DLLs/_ssl.pyd
  Python310/DLLs/_ssl_d.pyd
  Python310/DLLs/_testbuffer.pyd
  Python310/DLLs/_testbuffer_d.pyd
  Python310/DLLs/_testcapi.pyd
  Python310/DLLs/_testcapi_d.pyd
  Python310/DLLs/_testconsole.pyd
  Python310/DLLs/_testconsole_d.pyd
  Python310/DLLs/_testimportmultiple.pyd
  Python310/DLLs/_testimportmultiple_d.pyd
  Python310/DLLs/_testinternalcapi.pyd
  Python310/DLLs/_testinternalcapi_d.pyd
  Python310/DLLs/_testmultiphase.pyd
  Python310/DLLs/_testmultiphase_d.pyd
  Python310/DLLs/_tkinter.pyd
  Python310/DLLs/_tkinter_d.lib
  Python310/DLLs/_tkinter_d.pyd
  Python310/DLLs/_uuid.pyd
  Python310/DLLs/_uuid_d.pyd
  Python310/DLLs/_zoneinfo.pyd
  Python310/DLLs/_zoneinfo_d.pyd
  Python310/DLLs/libcrypto-1_1.dll
  Python310/DLLs/libssl-1_1.dll
(Three image files changed; dimensions and sizes unchanged: 74 KiB, 77 KiB, 81 KiB.)
More binary files added (new files; contents not shown):
  Python310/DLLs/pyexpat.pyd
  Python310/DLLs/pyexpat_d.pyd
  Python310/DLLs/python_lib.cat
  Python310/DLLs/python_tools.cat
  Python310/DLLs/select.pyd
  Python310/DLLs/select_d.pyd
  Python310/DLLs/sqlite3.dll
  Python310/DLLs/sqlite3_d.dll
  Python310/DLLs/tcl86t.dll
  Python310/DLLs/tk86t.dll
  Python310/DLLs/unicodedata.pyd
  Python310/DLLs/unicodedata_d.pyd
  Python310/DLLs/winsound.pyd
  Python310/DLLs/winsound_d.pyd
Python310/Lib/__future__.py (new file, 147 lines)
@@ -0,0 +1,147 @@
"""Record of phased-in incompatible language changes.

Each line is of the form:

    FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease ","
                              CompilerFlag ")"

where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples
of the same form as sys.version_info:

    (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int
     PY_MINOR_VERSION, # the 1; an int
     PY_MICRO_VERSION, # the 0; an int
     PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string
     PY_RELEASE_SERIAL # the 3; an int
    )

OptionalRelease records the first release in which

    from __future__ import FeatureName

was accepted.

In the case of MandatoryReleases that have not yet occurred,
MandatoryRelease predicts the release in which the feature will become part
of the language.

Else MandatoryRelease records when the feature became part of the language;
in releases at or after that, modules no longer need

    from __future__ import FeatureName

to use the feature in question, but may continue to use such imports.

MandatoryRelease may also be None, meaning that a planned feature got
dropped.

Instances of class _Feature have two corresponding methods,
.getOptionalRelease() and .getMandatoryRelease().

CompilerFlag is the (bitfield) flag that should be passed in the fourth
argument to the builtin function compile() to enable the feature in
dynamically compiled code.  This flag is stored in the .compiler_flag
attribute on _Future instances.  These values must match the appropriate
#defines of CO_xxx flags in Include/cpython/compile.h.

No feature line is ever to be deleted from this file.
"""

all_feature_names = [
    "nested_scopes",
    "generators",
    "division",
    "absolute_import",
    "with_statement",
    "print_function",
    "unicode_literals",
    "barry_as_FLUFL",
    "generator_stop",
    "annotations",
]

__all__ = ["all_feature_names"] + all_feature_names

# The CO_xxx symbols are defined here under the same names defined in
# code.h and used by compile.h, so that an editor search will find them here.
# However, they're not exported in __all__, because they don't really belong to
# this module.
CO_NESTED = 0x0010                      # nested_scopes
CO_GENERATOR_ALLOWED = 0                # generators (obsolete, was 0x1000)
CO_FUTURE_DIVISION = 0x20000            # division
CO_FUTURE_ABSOLUTE_IMPORT = 0x40000     # perform absolute imports by default
CO_FUTURE_WITH_STATEMENT = 0x80000      # with statement
CO_FUTURE_PRINT_FUNCTION = 0x100000     # print function
CO_FUTURE_UNICODE_LITERALS = 0x200000   # unicode string literals
CO_FUTURE_BARRY_AS_BDFL = 0x400000
CO_FUTURE_GENERATOR_STOP = 0x800000     # StopIteration becomes RuntimeError in generators
CO_FUTURE_ANNOTATIONS = 0x1000000       # annotations become strings at runtime


class _Feature:

    def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
        self.optional = optionalRelease
        self.mandatory = mandatoryRelease
        self.compiler_flag = compiler_flag

    def getOptionalRelease(self):
        """Return first release in which this feature was recognized.

        This is a 5-tuple, of the same form as sys.version_info.
        """
        return self.optional

    def getMandatoryRelease(self):
        """Return release in which this feature will become mandatory.

        This is a 5-tuple, of the same form as sys.version_info, or, if
        the feature was dropped, is None.
        """
        return self.mandatory

    def __repr__(self):
        return "_Feature" + repr((self.optional,
                                  self.mandatory,
                                  self.compiler_flag))


nested_scopes = _Feature((2, 1, 0, "beta", 1),
                         (2, 2, 0, "alpha", 0),
                         CO_NESTED)

generators = _Feature((2, 2, 0, "alpha", 1),
                      (2, 3, 0, "final", 0),
                      CO_GENERATOR_ALLOWED)

division = _Feature((2, 2, 0, "alpha", 2),
                    (3, 0, 0, "alpha", 0),
                    CO_FUTURE_DIVISION)

absolute_import = _Feature((2, 5, 0, "alpha", 1),
                           (3, 0, 0, "alpha", 0),
                           CO_FUTURE_ABSOLUTE_IMPORT)

with_statement = _Feature((2, 5, 0, "alpha", 1),
                          (2, 6, 0, "alpha", 0),
                          CO_FUTURE_WITH_STATEMENT)

print_function = _Feature((2, 6, 0, "alpha", 2),
                          (3, 0, 0, "alpha", 0),
                          CO_FUTURE_PRINT_FUNCTION)

unicode_literals = _Feature((2, 6, 0, "alpha", 2),
                            (3, 0, 0, "alpha", 0),
                            CO_FUTURE_UNICODE_LITERALS)

barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2),
                          (4, 0, 0, "alpha", 0),
                          CO_FUTURE_BARRY_AS_BDFL)

generator_stop = _Feature((3, 5, 0, "beta", 1),
                          (3, 7, 0, "alpha", 0),
                          CO_FUTURE_GENERATOR_STOP)

annotations = _Feature((3, 7, 0, "beta", 1),
                       (3, 11, 0, "alpha", 0),
                       CO_FUTURE_ANNOTATIONS)
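A quick illustrative sketch (not part of the commit): the compiler_flag values above are what the module docstring describes as the fourth argument to compile(). With the annotations flag, annotations are kept as strings (PEP 563), so an undefined name in an annotation no longer raises at definition time.

import __future__

src = "def f(x: SomeUndefinedType): pass"
# Without the flag, exec() would raise NameError when evaluating the annotation.
code = compile(src, "<example>", "exec",
               flags=__future__.annotations.compiler_flag)
exec(code)  # ok: the annotation stays the string "SomeUndefinedType"
print(__future__.annotations.getMandatoryRelease())  # (3, 11, 0, 'alpha', 0) in this source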
Python310/Lib/_aix_support.py (new file, 89 lines)
@@ -0,0 +1,89 @@
"""Shared AIX support functions."""

import sys
import sysconfig

try:
    import subprocess
except ImportError:  # pragma: no cover
    # _aix_support is used in distutils by setup.py to build C extensions,
    # before subprocess dependencies like _posixsubprocess are available.
    import _bootsubprocess as subprocess


def _aix_tag(vrtl, bd):
    # type: (List[int], int) -> str
    # Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
    _sz = 32 if sys.maxsize == (2**31-1) else 64
    # vrtl[version, release, technology_level]
    return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(vrtl[0], vrtl[1], vrtl[2], bd, _sz)


# extract version, release and technology level from a VRMF string
def _aix_vrtl(vrmf):
    # type: (str) -> List[int]
    v, r, tl = vrmf.split(".")[:3]
    return [int(v[-1]), int(r), int(tl)]


def _aix_bosmp64():
    # type: () -> Tuple[str, int]
    """
    Return a Tuple[str, int] e.g., ['7.1.4.34', 1806]
    The fileset bos.mp64 is the AIX kernel. It's VRMF and builddate
    reflect the current ABI levels of the runtime environment.
    """
    # We expect all AIX systems to have lslpp installed in this location
    out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.mp64"])
    out = out.decode("utf-8")
    out = out.strip().split(":")  # type: ignore
    # Use str() and int() to help mypy see types
    return (str(out[2]), int(out[-1]))


def aix_platform():
    # type: () -> str
    """
    AIX filesets are identified by four decimal values: V.R.M.F.
    V (version) and R (release) can be retreived using ``uname``
    Since 2007, starting with AIX 5.3 TL7, the M value has been
    included with the fileset bos.mp64 and represents the Technology
    Level (TL) of AIX. The F (Fix) value also increases, but is not
    relevant for comparing releases and binary compatibility.
    For binary compatibility the so-called builddate is needed.
    Again, the builddate of an AIX release is associated with bos.mp64.
    AIX ABI compatibility is described as guaranteed at: https://www.ibm.com/\
    support/knowledgecenter/en/ssw_aix_72/install/binary_compatability.html

    For pep425 purposes the AIX platform tag becomes:
    "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bitsize)
    e.g., "aix-6107-1415-32" for AIX 6.1 TL7 bd 1415, 32-bit
    and, "aix-6107-1415-64" for AIX 6.1 TL7 bd 1415, 64-bit
    """
    vrmf, bd = _aix_bosmp64()
    return _aix_tag(_aix_vrtl(vrmf), bd)


# extract vrtl from the BUILD_GNU_TYPE as an int
def _aix_bgt():
    # type: () -> List[int]
    gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
    if not gnu_type:
        raise ValueError("BUILD_GNU_TYPE is not defined")
    return _aix_vrtl(vrmf=gnu_type)


def aix_buildtag():
    # type: () -> str
    """
    Return the platform_tag of the system Python was built on.
    """
    # AIX_BUILDDATE is defined by configure with:
    # lslpp -Lcq bos.mp64 | awk -F: '{ print $NF }'
    build_date = sysconfig.get_config_var("AIX_BUILDDATE")
    try:
        build_date = int(build_date)
    except (ValueError, TypeError):
        raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
                         f"{build_date!r}")
    return _aix_tag(_aix_bgt(), build_date)
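An illustrative sketch (not part of the commit): aix_platform() shells out to /usr/bin/lslpp, so it only runs on AIX, but the tag format can be reproduced with assumed values; the '7.1.4.34'/1806 pair below is the docstring's own example.

vrmf, builddate = "7.1.4.34", 1806   # assumed lslpp output for bos.mp64
v, r, tl = (int(x) for x in vrmf.split(".")[:3])
bits = 64                            # 32 on a 32-bit interpreter
print("aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bits))
# -> aix-7104-1806-64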
Python310/Lib/_collections_abc.py (new file, 1166 lines)
File diff suppressed because it is too large.
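For context (not part of the commit): _collections_abc is the implementation module behind the public collections.abc ABCs, which is why the suppressed file is so large. A minimal use:

from collections.abc import Iterable, Mapping
print(isinstance([1, 2, 3], Iterable))  # True, via __iter__
print(isinstance({}, Mapping))          # True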
Python310/Lib/_compression.py (new file, 162 lines)
@@ -0,0 +1,162 @@
"""Internal classes used by the gzip, lzma and bz2 modules"""

import io
import sys

BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE  # Compressed data read chunk size


class BaseStream(io.BufferedIOBase):
    """Mode-checking helper functions."""

    def _check_not_closed(self):
        if self.closed:
            raise ValueError("I/O operation on closed file")

    def _check_can_read(self):
        if not self.readable():
            raise io.UnsupportedOperation("File not open for reading")

    def _check_can_write(self):
        if not self.writable():
            raise io.UnsupportedOperation("File not open for writing")

    def _check_can_seek(self):
        if not self.readable():
            raise io.UnsupportedOperation("Seeking is only supported "
                                          "on files open for reading")
        if not self.seekable():
            raise io.UnsupportedOperation("The underlying file object "
                                          "does not support seeking")


class DecompressReader(io.RawIOBase):
    """Adapts the decompressor API to a RawIOBase reader API"""

    def readable(self):
        return True

    def __init__(self, fp, decomp_factory, trailing_error=(), **decomp_args):
        self._fp = fp
        self._eof = False
        self._pos = 0  # Current offset in decompressed stream

        # Set to size of decompressed stream once it is known, for SEEK_END
        self._size = -1

        # Save the decompressor factory and arguments.
        # If the file contains multiple compressed streams, each
        # stream will need a separate decompressor object. A new decompressor
        # object is also needed when implementing a backwards seek().
        self._decomp_factory = decomp_factory
        self._decomp_args = decomp_args
        self._decompressor = self._decomp_factory(**self._decomp_args)

        # Exception class to catch from decompressor signifying invalid
        # trailing data to ignore
        self._trailing_error = trailing_error

    def close(self):
        self._decompressor = None
        return super().close()

    def seekable(self):
        return self._fp.seekable()

    def readinto(self, b):
        with memoryview(b) as view, view.cast("B") as byte_view:
            data = self.read(len(byte_view))
            byte_view[:len(data)] = data
        return len(data)

    def read(self, size=-1):
        if size < 0:
            return self.readall()

        if not size or self._eof:
            return b""
        data = None  # Default if EOF is encountered
        # Depending on the input data, our call to the decompressor may not
        # return any data. In this case, try again after reading another block.
        while True:
            if self._decompressor.eof:
                rawblock = (self._decompressor.unused_data or
                            self._fp.read(BUFFER_SIZE))
                if not rawblock:
                    break
                # Continue to next stream.
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
                try:
                    data = self._decompressor.decompress(rawblock, size)
                except self._trailing_error:
                    # Trailing data isn't a valid compressed stream; ignore it.
                    break
            else:
                if self._decompressor.needs_input:
                    rawblock = self._fp.read(BUFFER_SIZE)
                    if not rawblock:
                        raise EOFError("Compressed file ended before the "
                                       "end-of-stream marker was reached")
                else:
                    rawblock = b""
                data = self._decompressor.decompress(rawblock, size)
            if data:
                break
        if not data:
            self._eof = True
            self._size = self._pos
            return b""
        self._pos += len(data)
        return data

    def readall(self):
        chunks = []
        # sys.maxsize means the max length of output buffer is unlimited,
        # so that the whole input buffer can be decompressed within one
        # .decompress() call.
        while data := self.read(sys.maxsize):
            chunks.append(data)

        return b"".join(chunks)

    # Rewind the file to the beginning of the data stream.
    def _rewind(self):
        self._fp.seek(0)
        self._eof = False
        self._pos = 0
        self._decompressor = self._decomp_factory(**self._decomp_args)

    def seek(self, offset, whence=io.SEEK_SET):
        # Recalculate offset as an absolute file position.
        if whence == io.SEEK_SET:
            pass
        elif whence == io.SEEK_CUR:
            offset = self._pos + offset
        elif whence == io.SEEK_END:
            # Seeking relative to EOF - we need to know the file's size.
            if self._size < 0:
                while self.read(io.DEFAULT_BUFFER_SIZE):
                    pass
            offset = self._size + offset
        else:
            raise ValueError("Invalid value for whence: {}".format(whence))

        # Make it so that offset is the number of bytes to skip forward.
        if offset < self._pos:
            self._rewind()
        else:
            offset -= self._pos

        # Read and discard data until we reach the desired position.
        while offset > 0:
            data = self.read(min(io.DEFAULT_BUFFER_SIZE, offset))
            if not data:
                break
            offset -= len(data)

        return self._pos

    def tell(self):
        """Return the current file position."""
        return self._pos
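An illustrative sketch (not part of the commit): bz2.BZ2File builds on DecompressReader internally, passing the decompressor factory plus trailing_error=OSError. Because the adapter is a RawIOBase, it can also be driven directly and wrapped in io.BufferedReader:

import bz2
import io
from _compression import DecompressReader

payload = bz2.compress(b"hello world")
# Adapt the one-shot decompressor API to a readable raw stream.
raw = DecompressReader(io.BytesIO(payload), bz2.BZ2Decompressor,
                       trailing_error=OSError)
reader = io.BufferedReader(raw)
print(reader.read())  # b'hello world'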
Python310/Lib/_markupbase.py (new file, 396 lines)
@@ -0,0 +1,396 @@
"""Shared support for scanning document type declarations in HTML and XHTML.

This module is used as a foundation for the html.parser module.  It has no
documented public API and should not be used directly.

"""

import re

_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')

# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf

_msmarkedsectionclose = re.compile(r']\s*>')

del re


class ParserBase:
    """Parser base class which provides some common support methods used
    by the SGML/HTML and XHTML parsers."""

    def __init__(self):
        if self.__class__ is ParserBase:
            raise RuntimeError(
                "_markupbase.ParserBase must be subclassed")

    def reset(self):
        self.lineno = 1
        self.offset = 0

    def getpos(self):
        """Return current line number and offset."""
        return self.lineno, self.offset

    # Internal -- update line number and offset.  This should be
    # called for each piece of data exactly once, in order -- in other
    # words the concatenation of all the input strings to this
    # function should be exactly the entire input.
    def updatepos(self, i, j):
        if i >= j:
            return j
        rawdata = self.rawdata
        nlines = rawdata.count("\n", i, j)
        if nlines:
            self.lineno = self.lineno + nlines
            pos = rawdata.rindex("\n", i, j)  # Should not fail
            self.offset = j-(pos+1)
        else:
            self.offset = self.offset + j-i
        return j

    _decl_otherchars = ''

    # Internal -- parse declaration (for use by subclasses).
    def parse_declaration(self, i):
        # This is some sort of declaration; in "HTML as
        # deployed," this should only be the document type
        # declaration ("<!DOCTYPE html...>").
        # ISO 8879:1986, however, has more complex
        # declaration syntax for elements in <!...>, including:
        # --comment--
        # [marked section]
        # name in the following list: ENTITY, DOCTYPE, ELEMENT,
        # ATTLIST, NOTATION, SHORTREF, USEMAP,
        # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
        rawdata = self.rawdata
        j = i + 2
        assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
        if rawdata[j:j+1] == ">":
            # the empty comment <!>
            return j + 1
        if rawdata[j:j+1] in ("-", ""):
            # Start of comment followed by buffer boundary,
            # or just a buffer boundary.
            return -1
        # A simple, practical version could look like: ((name|stringlit) S*) + '>'
        n = len(rawdata)
        if rawdata[j:j+2] == '--': #comment
            # Locate --.*-- as the body of the comment
            return self.parse_comment(i)
        elif rawdata[j] == '[': #marked section
            # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
            # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
            # Note that this is extended by Microsoft Office "Save as Web" function
            # to include [if...] and [endif].
            return self.parse_marked_section(i)
        else: #all other declaration elements
            decltype, j = self._scan_name(j, i)
        if j < 0:
            return j
        if decltype == "doctype":
            self._decl_otherchars = ''
        while j < n:
            c = rawdata[j]
            if c == ">":
                # end of declaration syntax
                data = rawdata[i+2:j]
                if decltype == "doctype":
                    self.handle_decl(data)
                else:
                    # According to the HTML5 specs sections "8.2.4.44 Bogus
                    # comment state" and "8.2.4.45 Markup declaration open
                    # state", a comment token should be emitted.
                    # Calling unknown_decl provides more flexibility though.
                    self.unknown_decl(data)
                return j + 1
            if c in "\"'":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1  # incomplete
                j = m.end()
            elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
                name, j = self._scan_name(j, i)
            elif c in self._decl_otherchars:
                j = j + 1
            elif c == "[":
                # this could be handled in a separate doctype parser
                if decltype == "doctype":
                    j = self._parse_doctype_subset(j + 1, i)
                elif decltype in {"attlist", "linktype", "link", "element"}:
                    # must tolerate []'d groups in a content model in an element declaration
                    # also in data attribute specifications of attlist declaration
                    # also link type declaration subsets in linktype declarations
                    # also link attribute specification lists in link declarations
                    raise AssertionError("unsupported '[' char in %s declaration" % decltype)
                else:
                    raise AssertionError("unexpected '[' char in declaration")
            else:
                raise AssertionError("unexpected %r char in declaration" % rawdata[j])
            if j < 0:
                return j
        return -1  # incomplete

    # Internal -- parse a marked section
    # Override this to handle MS-word extension syntax <![if word]>content<![endif]>
    def parse_marked_section(self, i, report=1):
        rawdata = self.rawdata
        assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
        sectName, j = self._scan_name(i+3, i)
        if j < 0:
            return j
        if sectName in {"temp", "cdata", "ignore", "include", "rcdata"}:
            # look for standard ]]> ending
            match = _markedsectionclose.search(rawdata, i+3)
        elif sectName in {"if", "else", "endif"}:
            # look for MS Office ]> ending
            match = _msmarkedsectionclose.search(rawdata, i+3)
        else:
            raise AssertionError(
                'unknown status keyword %r in marked section' % rawdata[i+3:j]
            )
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.unknown_decl(rawdata[i+3: j])
        return match.end(0)

    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i, report=1):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            raise AssertionError('unexpected call to parse_comment()')
        match = _commentclose.search(rawdata, i+4)
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.handle_comment(rawdata[i+4: j])
        return match.end(0)

    # Internal -- scan past the internal subset in a <!DOCTYPE declaration,
    # returning the index just past any whitespace following the trailing ']'.
    def _parse_doctype_subset(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        j = i
        while j < n:
            c = rawdata[j]
            if c == "<":
                s = rawdata[j:j+2]
                if s == "<":
                    # end of buffer; incomplete
                    return -1
                if s != "<!":
                    self.updatepos(declstartpos, j + 1)
                    raise AssertionError(
                        "unexpected char in internal subset (in %r)" % s
                    )
                if (j + 2) == n:
                    # end of buffer; incomplete
                    return -1
                if (j + 4) > n:
                    # end of buffer; incomplete
                    return -1
                if rawdata[j:j+4] == "<!--":
                    j = self.parse_comment(j, report=0)
                    if j < 0:
                        return j
                    continue
                name, j = self._scan_name(j + 2, declstartpos)
                if j == -1:
                    return -1
                if name not in {"attlist", "element", "entity", "notation"}:
                    self.updatepos(declstartpos, j + 2)
                    raise AssertionError(
                        "unknown declaration %r in internal subset" % name
                    )
                # handle the individual names
                meth = getattr(self, "_parse_doctype_" + name)
                j = meth(j, declstartpos)
                if j < 0:
                    return j
            elif c == "%":
                # parameter entity reference
                if (j + 1) == n:
                    # end of buffer; incomplete
                    return -1
                s, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                if rawdata[j] == ";":
                    j = j + 1
            elif c == "]":
                j = j + 1
                while j < n and rawdata[j].isspace():
                    j = j + 1
                if j < n:
                    if rawdata[j] == ">":
                        return j
                    self.updatepos(declstartpos, j)
                    raise AssertionError("unexpected char after internal subset")
                else:
                    return -1
            elif c.isspace():
                j = j + 1
            else:
                self.updatepos(declstartpos, j)
                raise AssertionError("unexpected char %r in internal subset" % c)
        # end of buffer reached
        return -1

    # Internal -- scan past <!ELEMENT declarations
    def _parse_doctype_element(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j == -1:
            return -1
        # style content model; just skip until '>'
        rawdata = self.rawdata
        if '>' in rawdata[j:]:
            return rawdata.find(">", j) + 1
        return -1

    # Internal -- scan past <!ATTLIST declarations
    def _parse_doctype_attlist(self, i, declstartpos):
        rawdata = self.rawdata
        name, j = self._scan_name(i, declstartpos)
        c = rawdata[j:j+1]
        if c == "":
            return -1
        if c == ">":
            return j + 1
        while 1:
            # scan a series of attribute descriptions; simplified:
            #   name type [value] [#constraint]
            name, j = self._scan_name(j, declstartpos)
            if j < 0:
                return j
            c = rawdata[j:j+1]
            if c == "":
                return -1
            if c == "(":
                # an enumerated type; look for ')'
                if ")" in rawdata[j:]:
                    j = rawdata.find(")", j) + 1
                else:
                    return -1
                while rawdata[j:j+1].isspace():
                    j = j + 1
                if not rawdata[j:]:
                    # end of buffer, incomplete
                    return -1
            else:
                name, j = self._scan_name(j, declstartpos)
            c = rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == "#":
                if rawdata[j:] == "#":
                    # end of buffer
                    return -1
                name, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == '>':
                # all done
                return j + 1

    # Internal -- scan past <!NOTATION declarations
    def _parse_doctype_notation(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j < 0:
            return j
        rawdata = self.rawdata
        while 1:
            c = rawdata[j:j+1]
            if not c:
                # end of buffer; incomplete
                return -1
            if c == '>':
                return j + 1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1
                j = m.end()
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan past <!ENTITY declarations
    def _parse_doctype_entity(self, i, declstartpos):
        rawdata = self.rawdata
        if rawdata[i:i+1] == "%":
            j = i + 1
            while 1:
                c = rawdata[j:j+1]
                if not c:
                    return -1
                if c.isspace():
                    j = j + 1
                else:
                    break
        else:
            j = i
        name, j = self._scan_name(j, declstartpos)
        if j < 0:
            return j
        while 1:
            c = self.rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1  # incomplete
            elif c == ">":
                return j + 1
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan a name token and the new position and the token, or
    # return -1 if we've reached the end of the buffer.
    def _scan_name(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        if i == n:
            return None, -1
        m = _declname_match(rawdata, i)
        if m:
            s = m.group()
            name = s.strip()
            if (i + len(s)) == n:
                return None, -1  # end of buffer
            return name.lower(), m.end()
        else:
            self.updatepos(declstartpos, i)
            raise AssertionError(
                "expected name token at %r" % rawdata[declstartpos:declstartpos+20]
            )

    # To be overridden -- handlers for unknown objects
    def unknown_decl(self, data):
        pass
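An illustrative sketch (not part of the commit): ParserBase refuses direct instantiation, as its __init__ shows; html.parser.HTMLParser subclasses it, and declarations recognized by parse_declaration() arrive in handle_decl():

from html.parser import HTMLParser

class DeclSniffer(HTMLParser):
    def handle_decl(self, decl):
        print("declaration:", decl)  # prints: declaration: DOCTYPE html

DeclSniffer().feed("<!DOCTYPE html><p>hi</p>")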
Python310/Lib/_osx_support.py (new file, 574 lines; diff truncated below)
@@ -0,0 +1,574 @@
"""Shared OS X support functions."""

import os
import re
import sys

__all__ = [
    'compiler_fixup',
    'customize_config_vars',
    'customize_compiler',
    'get_platform_osx',
]

# configuration variables that may contain universal build flags,
# like "-arch" or "-isdkroot", that may need customization for
# the user environment
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
                          'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
                          'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
                          'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS')

# configuration variables that may contain compiler calls
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')

# prefix added to original configuration variable names
_INITPRE = '_OSX_SUPPORT_INITIAL_'


def _find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    A string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH'].  Returns the complete filename or None if not found.
    """
    if path is None:
        path = os.environ['PATH']

    paths = path.split(os.pathsep)
    base, ext = os.path.splitext(executable)

    if (sys.platform == 'win32') and (ext != '.exe'):
        executable = executable + '.exe'

    if not os.path.isfile(executable):
        for p in paths:
            f = os.path.join(p, executable)
            if os.path.isfile(f):
                # the file exists, we have a shot at spawn working
                return f
        return None
    else:
        return executable


def _read_output(commandstring, capture_stderr=False):
    """Output from successful command execution or None"""
    # Similar to os.popen(commandstring, "r").read(),
    # but without actually using os.popen because that
    # function is not usable during python bootstrap.
    # tempfile is also not available then.
    import contextlib
    try:
        import tempfile
        fp = tempfile.NamedTemporaryFile()
    except ImportError:
        fp = open("/tmp/_osx_support.%s"%(
            os.getpid(),), "w+b")

    with contextlib.closing(fp) as fp:
        if capture_stderr:
            cmd = "%s >'%s' 2>&1" % (commandstring, fp.name)
        else:
            cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
        return fp.read().decode('utf-8').strip() if not os.system(cmd) else None


def _find_build_tool(toolname):
    """Find a build tool on current path or using xcrun"""
    return (_find_executable(toolname)
                or _read_output("/usr/bin/xcrun -find %s" % (toolname,))
                or ''
            )

_SYSTEM_VERSION = None

def _get_system_version():
    """Return the OS X system version as a string"""
    # Reading this plist is a documented way to get the system
    # version (see the documentation for the Gestalt Manager)
    # We avoid using platform.mac_ver to avoid possible bootstrap issues during
    # the build of Python itself (distutils is used to build standard library
    # extensions).

    global _SYSTEM_VERSION

    if _SYSTEM_VERSION is None:
        _SYSTEM_VERSION = ''
        try:
            f = open('/System/Library/CoreServices/SystemVersion.plist', encoding="utf-8")
        except OSError:
            # We're on a plain darwin box, fall back to the default
            # behaviour.
            pass
        else:
            try:
                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                              r'<string>(.*?)</string>', f.read())
            finally:
                f.close()
            if m is not None:
                _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
            # else: fall back to the default behaviour

    return _SYSTEM_VERSION

_SYSTEM_VERSION_TUPLE = None
def _get_system_version_tuple():
    """
    Return the macOS system version as a tuple

    The return value is safe to use to compare
    two version numbers.
    """
    global _SYSTEM_VERSION_TUPLE
    if _SYSTEM_VERSION_TUPLE is None:
        osx_version = _get_system_version()
        if osx_version:
            try:
                _SYSTEM_VERSION_TUPLE = tuple(int(i) for i in osx_version.split('.'))
            except ValueError:
                _SYSTEM_VERSION_TUPLE = ()

    return _SYSTEM_VERSION_TUPLE


def _remove_original_values(_config_vars):
    """Remove original unmodified values for testing"""
    # This is needed for higher-level cross-platform tests of get_platform.
    for k in list(_config_vars):
        if k.startswith(_INITPRE):
            del _config_vars[k]

def _save_modified_value(_config_vars, cv, newvalue):
    """Save modified and original unmodified value of configuration var"""

    oldvalue = _config_vars.get(cv, '')
    if (oldvalue != newvalue) and (_INITPRE + cv not in _config_vars):
        _config_vars[_INITPRE + cv] = oldvalue
    _config_vars[cv] = newvalue


_cache_default_sysroot = None
def _default_sysroot(cc):
    """ Returns the root of the default SDK for this system, or '/' """
    global _cache_default_sysroot

    if _cache_default_sysroot is not None:
        return _cache_default_sysroot

    contents = _read_output('%s -c -E -v - </dev/null' % (cc,), True)
    in_incdirs = False
    for line in contents.splitlines():
        if line.startswith("#include <...>"):
            in_incdirs = True
        elif line.startswith("End of search list"):
            in_incdirs = False
        elif in_incdirs:
            line = line.strip()
            if line == '/usr/include':
                _cache_default_sysroot = '/'
            elif line.endswith(".sdk/usr/include"):
                _cache_default_sysroot = line[:-12]
    if _cache_default_sysroot is None:
        _cache_default_sysroot = '/'

    return _cache_default_sysroot

def _supports_universal_builds():
    """Returns True if universal builds are supported on this system"""
    # As an approximation, we assume that if we are running on 10.4 or above,
    # then we are running with an Xcode environment that supports universal
    # builds, in particular -isysroot and -arch arguments to the compiler. This
    # is in support of allowing 10.4 universal builds to run on 10.3.x systems.

    osx_version = _get_system_version_tuple()
    return bool(osx_version >= (10, 4)) if osx_version else False

def _supports_arm64_builds():
    """Returns True if arm64 builds are supported on this system"""
    # There are two sets of systems supporting macOS/arm64 builds:
    # 1. macOS 11 and later, unconditionally
    # 2. macOS 10.15 with Xcode 12.2 or later
    # For now the second category is ignored.
    osx_version = _get_system_version_tuple()
    return osx_version >= (11, 0) if osx_version else False


def _find_appropriate_compiler(_config_vars):
    """Find appropriate C compiler for extension module builds"""

    # Issue #13590:
    #    The OSX location for the compiler varies between OSX
    #    (or rather Xcode) releases. With older releases (up-to 10.5)
    #    the compiler is in /usr/bin, with newer releases the compiler
    #    can only be found inside Xcode.app if the "Command Line Tools"
    #    are not installed.
    #
    #    Furthermore, the compiler that can be used varies between
    #    Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
    #    as the compiler, after that 'clang' should be used because
    #    gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
    #    miscompiles Python.

    # skip checks if the compiler was overridden with a CC env variable
    if 'CC' in os.environ:
        return _config_vars

    # The CC config var might contain additional arguments.
    # Ignore them while searching.
    cc = oldcc = _config_vars['CC'].split()[0]
    if not _find_executable(cc):
        # Compiler is not found on the shell search PATH.
        # Now search for clang, first on PATH (if the Command LIne
        # Tools have been installed in / or if the user has provided
        # another location via CC).  If not found, try using xcrun
        # to find an uninstalled clang (within a selected Xcode).

        # NOTE: Cannot use subprocess here because of bootstrap
        # issues when building Python itself (and os.popen is
        # implemented on top of subprocess and is therefore not
        # usable as well)

        cc = _find_build_tool('clang')

    elif os.path.basename(cc).startswith('gcc'):
        # Compiler is GCC, check if it is LLVM-GCC
        data = _read_output("'%s' --version"
                             % (cc.replace("'", "'\"'\"'"),))
        if data and 'llvm-gcc' in data:
            # Found LLVM-GCC, fall back to clang
            cc = _find_build_tool('clang')

    if not cc:
        raise SystemError(
               "Cannot locate working compiler")

    if cc != oldcc:
        # Found a replacement compiler.
        # Modify config vars using new compiler, if not already explicitly
        # overridden by an env variable, preserving additional arguments.
        for cv in _COMPILER_CONFIG_VARS:
            if cv in _config_vars and cv not in os.environ:
                cv_split = _config_vars[cv].split()
                cv_split[0] = cc if cv != 'CXX' else cc + '++'
                _save_modified_value(_config_vars, cv, ' '.join(cv_split))

    return _config_vars


def _remove_universal_flags(_config_vars):
    """Remove all universal build arguments from config vars"""

    for cv in _UNIVERSAL_CONFIG_VARS:
        # Do not alter a config var explicitly overridden by env var
        if cv in _config_vars and cv not in os.environ:
            flags = _config_vars[cv]
            flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
            flags = re.sub(r'-isysroot\s*\S+', ' ', flags)
            _save_modified_value(_config_vars, cv, flags)

    return _config_vars


def _remove_unsupported_archs(_config_vars):
    """Remove any unsupported archs from config vars"""
    # Different Xcode releases support different sets for '-arch'
    # flags. In particular, Xcode 4.x no longer supports the
    # PPC architectures.
    #
    # This code automatically removes '-arch ppc' and '-arch ppc64'
    # when these are not supported. That makes it possible to
    # build extensions on OSX 10.7 and later with the prebuilt
    # 32-bit installer on the python.org website.

    # skip checks if the compiler was overridden with a CC env variable
    if 'CC' in os.environ:
        return _config_vars

    if re.search(r'-arch\s+ppc', _config_vars['CFLAGS']) is not None:
        # NOTE: Cannot use subprocess here because of bootstrap
        # issues when building Python itself
        status = os.system(
            """echo 'int main{};' | """
            """'%s' -c -arch ppc -x c -o /dev/null /dev/null 2>/dev/null"""
            %(_config_vars['CC'].replace("'", "'\"'\"'"),))
        if status:
            # The compile failed for some reason.  Because of differences
            # across Xcode and compiler versions, there is no reliable way
            # to be sure why it failed.  Assume here it was due to lack of
            # PPC support and remove the related '-arch' flags from each
            # config variables not explicitly overridden by an environment
            # variable.  If the error was for some other reason, we hope the
            # failure will show up again when trying to compile an extension
            # module.
            for cv in _UNIVERSAL_CONFIG_VARS:
                if cv in _config_vars and cv not in os.environ:
                    flags = _config_vars[cv]
                    flags = re.sub(r'-arch\s+ppc\w*\s', ' ', flags)
                    _save_modified_value(_config_vars, cv, flags)

    return _config_vars


def _override_all_archs(_config_vars):
    """Allow override of all archs with ARCHFLAGS env var"""
    # NOTE: This name was introduced by Apple in OSX 10.5 and
    # is used by several scripting languages distributed with
    # that OS release.
    if 'ARCHFLAGS' in os.environ:
        arch = os.environ['ARCHFLAGS']
        for cv in _UNIVERSAL_CONFIG_VARS:
            if cv in _config_vars and '-arch' in _config_vars[cv]:
                flags = _config_vars[cv]
                flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                flags = flags + ' ' + arch
                _save_modified_value(_config_vars, cv, flags)

    return _config_vars


def _check_for_unavailable_sdk(_config_vars):
    """Remove references to any SDKs not available"""
    # If we're on OSX 10.5 or later and the user tries to
    # compile an extension using an SDK that is not present
    # on the current machine it is better to not use an SDK
    # than to fail.  This is particularly important with
    # the standalone Command Line Tools alternative to a
    # full-blown Xcode install since the CLT packages do not
    # provide SDKs. If the SDK is not present, it is assumed
    # that the header files and dev libs have been installed
    # to /usr and /System/Library by either a standalone CLT
    # package or the CLT component within Xcode.
    cflags = _config_vars.get('CFLAGS', '')
    m = re.search(r'-isysroot\s*(\S+)', cflags)
    if m is not None:
        sdk = m.group(1)
        if not os.path.exists(sdk):
            for cv in _UNIVERSAL_CONFIG_VARS:
                # Do not alter a config var explicitly overridden by env var
                if cv in _config_vars and cv not in os.environ:
                    flags = _config_vars[cv]
                    flags = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ', flags)
                    _save_modified_value(_config_vars, cv, flags)

    return _config_vars


def compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one them in extra_compile_flags.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.
    """
    stripArch = stripSysroot = False

    compiler_so = list(compiler_so)

    if not _supports_universal_builds():
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))

    if stripArch or 'ARCHFLAGS' in os.environ:
        while True:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break

    elif not _supports_arm64_builds():
        # Look for "-arch arm64" and drop that
        for idx in reversed(range(len(compiler_so))):
            if compiler_so[idx] == '-arch' and compiler_so[idx+1] == "arm64":
                del compiler_so[idx:idx+2]

    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also distutils.sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()

    if stripSysroot:
        while True:
            indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
            if not indices:
                break
            index = indices[0]
            if compiler_so[index] == '-isysroot':
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            else:
                # It's '-isysroot/some/path' in one arg
                del compiler_so[index:index+1]

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    argvar = cc_args
    indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
    if not indices:
        argvar = compiler_so
        indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]

    for idx in indices:
        if argvar[idx] == '-isysroot':
            sysroot = argvar[idx+1]
            break
        else:
            sysroot = argvar[idx][len('-isysroot'):]
            break

    if sysroot and not os.path.isdir(sysroot):
        sys.stderr.write(f"Compiling with an SDK that doesn't seem to exist: {sysroot}\n")
        sys.stderr.write("Please check your Xcode installation\n")
        sys.stderr.flush()

    return compiler_so


def customize_config_vars(_config_vars):
    """Customize Python build configuration variables.

    Called internally from sysconfig with a mutable mapping
    containing name/value pairs parsed from the configured
    makefile used to build this interpreter.  Returns
    the mapping updated as needed to reflect the environment
    in which the interpreter is running; in the case of
    a Python from a binary installer, the installed
    environment may be very different from the build
    environment, i.e. different OS levels, different
    built tools, different available CPU architectures.

    This customization is performed whenever
    distutils.sysconfig.get_config_vars() is first
    called.  It may be used in environments where no
    compilers are present, i.e. when installing pure
    Python dists.  Customization of compiler paths
    and detection of unavailable archs is deferred
    until the first extension module build is
    requested (in distutils.sysconfig.customize_compiler).

    Currently called from distutils.sysconfig
    """

    if not _supports_universal_builds():
        # On Mac OS X before 10.4, check if -arch and -isysroot
        # are in CFLAGS or LDFLAGS and remove them if they are.
        # This is needed when building extensions on a 10.3 system
        # using a universal build of python.
        _remove_universal_flags(_config_vars)

    # Allow user to override all archs with ARCHFLAGS env var
    _override_all_archs(_config_vars)

    # Remove references to sdks that are not found
    _check_for_unavailable_sdk(_config_vars)

    return _config_vars


def customize_compiler(_config_vars):
    """Customize compiler path and configuration variables.

    This customization is performed when the first
    extension module build is requested
    in distutils.sysconfig.customize_compiler.
    """

    # Find a compiler to use for extension module builds
    _find_appropriate_compiler(_config_vars)

    # Remove ppc arch flags if not supported here
    _remove_unsupported_archs(_config_vars)

    # Allow user to override all archs with ARCHFLAGS env var
    _override_all_archs(_config_vars)

    return _config_vars


def get_platform_osx(_config_vars, osname, release, machine):
    """Filter values for get_platform()"""
    # called from get_platform() in sysconfig and distutils.util
    #
    # For our purposes, we'll assume that the system version from
|
||||||
|
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
|
||||||
|
# to. This makes the compatibility story a bit more sane because the
|
||||||
|
# machine is going to compile and link as if it were
|
||||||
|
# MACOSX_DEPLOYMENT_TARGET.
|
||||||
|
|
||||||
|
macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
|
||||||
|
macrelease = _get_system_version() or macver
|
||||||
|
macver = macver or macrelease
|
||||||
|
|
||||||
|
if macver:
|
||||||
|
release = macver
|
||||||
|
osname = "macosx"
|
||||||
|
|
||||||
|
# Use the original CFLAGS value, if available, so that we
|
||||||
|
# return the same machine type for the platform string.
|
||||||
|
# Otherwise, distutils may consider this a cross-compiling
|
||||||
|
# case and disallow installs.
|
||||||
|
cflags = _config_vars.get(_INITPRE+'CFLAGS',
|
||||||
|
_config_vars.get('CFLAGS', ''))
|
||||||
|
if macrelease:
|
||||||
|
try:
|
||||||
|
macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
|
||||||
|
except ValueError:
|
||||||
|
macrelease = (10, 3)
|
||||||
|
else:
|
||||||
|
# assume no universal support
|
||||||
|
macrelease = (10, 3)
|
||||||
|
|
||||||
|
if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
|
||||||
|
# The universal build will build fat binaries, but not on
|
||||||
|
# systems before 10.4
|
||||||
|
|
||||||
|
machine = 'fat'
|
||||||
|
|
||||||
|
archs = re.findall(r'-arch\s+(\S+)', cflags)
|
||||||
|
archs = tuple(sorted(set(archs)))
|
||||||
|
|
||||||
|
if len(archs) == 1:
|
||||||
|
machine = archs[0]
|
||||||
|
elif archs == ('arm64', 'x86_64'):
|
||||||
|
machine = 'universal2'
|
||||||
|
elif archs == ('i386', 'ppc'):
|
||||||
|
machine = 'fat'
|
||||||
|
elif archs == ('i386', 'x86_64'):
|
||||||
|
machine = 'intel'
|
||||||
|
elif archs == ('i386', 'ppc', 'x86_64'):
|
||||||
|
machine = 'fat3'
|
||||||
|
elif archs == ('ppc64', 'x86_64'):
|
||||||
|
machine = 'fat64'
|
||||||
|
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
|
||||||
|
machine = 'universal'
|
||||||
|
else:
|
||||||
|
raise ValueError(
|
||||||
|
"Don't know machine value for archs=%r" % (archs,))
|
||||||
|
|
||||||
|
elif machine == 'i386':
|
||||||
|
# On OSX the machine type returned by uname is always the
|
||||||
|
# 32-bit variant, even if the executable architecture is
|
||||||
|
# the 64-bit variant
|
||||||
|
if sys.maxsize >= 2**32:
|
||||||
|
machine = 'x86_64'
|
||||||
|
|
||||||
|
elif machine in ('PowerPC', 'Power_Macintosh'):
|
||||||
|
# Pick a sane name for the PPC architecture.
|
||||||
|
# See 'i386' case
|
||||||
|
if sys.maxsize >= 2**32:
|
||||||
|
machine = 'ppc64'
|
||||||
|
else:
|
||||||
|
machine = 'ppc'
|
||||||
|
|
||||||
|
return (osname, release, machine)
|
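The helpers above are easiest to see in action with synthetic flag lists. This is a minimal editorial sketch, assuming Python 3.10; the '/NoSuchSDK.sdk' path is deliberately fake (it only triggers the stderr warning), and the get_platform_osx() result assumes CFLAGS carrying exactly an arm64 and an x86_64 '-arch' flag.

import _osx_support

# compiler_fixup() drops '-arch' from the command because cc_args already
# carries its own '-arch'; the nonexistent SDK is reported on stderr.
print(_osx_support.compiler_fixup(
    ['clang', '-arch', 'x86_64', '-isysroot', '/NoSuchSDK.sdk', '-O2'],
    ['-arch', 'arm64']))

# get_platform_osx() maps the set of '-arch' flags in CFLAGS to a machine
# tag such as 'universal2'.
cfg = {'MACOSX_DEPLOYMENT_TARGET': '11.0',
       'CFLAGS': '-arch arm64 -arch x86_64'}
print(_osx_support.get_platform_osx(cfg, 'posix', '11.0', 'arm64'))
# -> ('macosx', '11.0', 'universal2')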
Python310/Lib/_pydecimal.py (new file, 6410 lines; diff suppressed because it is too large)

Python310/Lib/_pyio.py (new file, 2718 lines; diff suppressed because it is too large)

Python310/Lib/_sitebuiltins.py (new file, 103 lines)
@@ -0,0 +1,103 @@
"""
The objects used by the site module to add custom builtins.
"""

# Those objects are almost immortal and they keep a reference to their module
# globals.  Defining them in the site module would keep too many references
# alive.
# Note this means this module should also avoid keeping things alive in its
# globals.

import sys

class Quitter(object):
    def __init__(self, name, eof):
        self.name = name
        self.eof = eof
    def __repr__(self):
        return 'Use %s() or %s to exit' % (self.name, self.eof)
    def __call__(self, code=None):
        # Shells like IDLE catch the SystemExit, but listen when their
        # stdin wrapper is closed.
        try:
            sys.stdin.close()
        except:
            pass
        raise SystemExit(code)


class _Printer(object):
    """interactive prompt objects for printing the license text, a list of
    contributors and the copyright notice."""

    MAXLINES = 23

    def __init__(self, name, data, files=(), dirs=()):
        import os
        self.__name = name
        self.__data = data
        self.__lines = None
        self.__filenames = [os.path.join(dir, filename)
                            for dir in dirs
                            for filename in files]

    def __setup(self):
        if self.__lines:
            return
        data = None
        for filename in self.__filenames:
            try:
                with open(filename, encoding='utf-8') as fp:
                    data = fp.read()
                break
            except OSError:
                pass
        if not data:
            data = self.__data
        self.__lines = data.split('\n')
        self.__linecnt = len(self.__lines)

    def __repr__(self):
        self.__setup()
        if len(self.__lines) <= self.MAXLINES:
            return "\n".join(self.__lines)
        else:
            return "Type %s() to see the full %s text" % ((self.__name,)*2)

    def __call__(self):
        self.__setup()
        prompt = 'Hit Return for more, or q (and Return) to quit: '
        lineno = 0
        while 1:
            try:
                for i in range(lineno, lineno + self.MAXLINES):
                    print(self.__lines[i])
            except IndexError:
                break
            else:
                lineno += self.MAXLINES
                key = None
                while key is None:
                    key = input(prompt)
                    if key not in ('', 'q'):
                        key = None
                if key == 'q':
                    break


class _Helper(object):
    """Define the builtin 'help'.

    This is a wrapper around pydoc.help that provides a helpful message
    when 'help' is typed at the Python interactive prompt.

    Calling help() at the Python prompt starts an interactive help session.
    Calling help(thing) prints help for the python object 'thing'.
    """

    def __repr__(self):
        return "Type help() for interactive help, " \
               "or help(object) for help about object."
    def __call__(self, *args, **kwds):
        import pydoc
        return pydoc.help(*args, **kwds)
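A minimal sketch of how site.py wires these objects into builtins, assuming a POSIX platform (the eof string differs on Windows) and with the license text shortened to a placeholder string:

import builtins
import _sitebuiltins

builtins.quit = _sitebuiltins.Quitter('quit', 'Ctrl-D (i.e. EOF)')
builtins.exit = _sitebuiltins.Quitter('exit', 'Ctrl-D (i.e. EOF)')
builtins.license = _sitebuiltins._Printer('license', 'placeholder license text')
builtins.help = _sitebuiltins._Helper()

print(repr(builtins.exit))  # Use exit() or Ctrl-D (i.e. EOF) to exit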
Python310/Lib/abc.py (new file, 188 lines)
@@ -0,0 +1,188 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Abstract Base Classes (ABCs) according to PEP 3119."""


def abstractmethod(funcobj):
    """A decorator indicating abstract methods.

    Requires that the metaclass is ABCMeta or derived from it.  A
    class that has a metaclass derived from ABCMeta cannot be
    instantiated unless all of its abstract methods are overridden.
    The abstract methods can be called using any of the normal
    'super' call mechanisms.  abstractmethod() may be used to declare
    abstract methods for properties and descriptors.

    Usage:

        class C(metaclass=ABCMeta):
            @abstractmethod
            def my_abstract_method(self, ...):
                ...
    """
    funcobj.__isabstractmethod__ = True
    return funcobj


class abstractclassmethod(classmethod):
    """A decorator indicating abstract classmethods.

    Deprecated, use 'classmethod' with 'abstractmethod' instead:

        class C(ABC):
            @classmethod
            @abstractmethod
            def my_abstract_classmethod(cls, ...):
                ...

    """

    __isabstractmethod__ = True

    def __init__(self, callable):
        callable.__isabstractmethod__ = True
        super().__init__(callable)


class abstractstaticmethod(staticmethod):
    """A decorator indicating abstract staticmethods.

    Deprecated, use 'staticmethod' with 'abstractmethod' instead:

        class C(ABC):
            @staticmethod
            @abstractmethod
            def my_abstract_staticmethod(...):
                ...

    """

    __isabstractmethod__ = True

    def __init__(self, callable):
        callable.__isabstractmethod__ = True
        super().__init__(callable)


class abstractproperty(property):
    """A decorator indicating abstract properties.

    Deprecated, use 'property' with 'abstractmethod' instead:

        class C(ABC):
            @property
            @abstractmethod
            def my_abstract_property(self):
                ...

    """

    __isabstractmethod__ = True


try:
    from _abc import (get_cache_token, _abc_init, _abc_register,
                      _abc_instancecheck, _abc_subclasscheck, _get_dump,
                      _reset_registry, _reset_caches)
except ImportError:
    from _py_abc import ABCMeta, get_cache_token
    ABCMeta.__module__ = 'abc'
else:
    class ABCMeta(type):
        """Metaclass for defining Abstract Base Classes (ABCs).

        Use this metaclass to create an ABC.  An ABC can be subclassed
        directly, and then acts as a mix-in class.  You can also register
        unrelated concrete classes (even built-in classes) and unrelated
        ABCs as 'virtual subclasses' -- these and their descendants will
        be considered subclasses of the registering ABC by the built-in
        issubclass() function, but the registering ABC won't show up in
        their MRO (Method Resolution Order) nor will method
        implementations defined by the registering ABC be callable (not
        even via super()).
        """
        def __new__(mcls, name, bases, namespace, **kwargs):
            cls = super().__new__(mcls, name, bases, namespace, **kwargs)
            _abc_init(cls)
            return cls

        def register(cls, subclass):
            """Register a virtual subclass of an ABC.

            Returns the subclass, to allow usage as a class decorator.
            """
            return _abc_register(cls, subclass)

        def __instancecheck__(cls, instance):
            """Override for isinstance(instance, cls)."""
            return _abc_instancecheck(cls, instance)

        def __subclasscheck__(cls, subclass):
            """Override for issubclass(subclass, cls)."""
            return _abc_subclasscheck(cls, subclass)

        def _dump_registry(cls, file=None):
            """Debug helper to print the ABC registry."""
            print(f"Class: {cls.__module__}.{cls.__qualname__}", file=file)
            print(f"Inv. counter: {get_cache_token()}", file=file)
            (_abc_registry, _abc_cache, _abc_negative_cache,
             _abc_negative_cache_version) = _get_dump(cls)
            print(f"_abc_registry: {_abc_registry!r}", file=file)
            print(f"_abc_cache: {_abc_cache!r}", file=file)
            print(f"_abc_negative_cache: {_abc_negative_cache!r}", file=file)
            print(f"_abc_negative_cache_version: {_abc_negative_cache_version!r}",
                  file=file)

        def _abc_registry_clear(cls):
            """Clear the registry (for debugging or testing)."""
            _reset_registry(cls)

        def _abc_caches_clear(cls):
            """Clear the caches (for debugging or testing)."""
            _reset_caches(cls)


def update_abstractmethods(cls):
    """Recalculate the set of abstract methods of an abstract class.

    If a class has had one of its abstract methods implemented after the
    class was created, the method will not be considered implemented until
    this function is called. Alternatively, if a new abstract method has been
    added to the class, it will only be considered an abstract method of the
    class after this function is called.

    This function should be called before any use is made of the class,
    usually in class decorators that add methods to the subject class.

    Returns cls, to allow usage as a class decorator.

    If cls is not an instance of ABCMeta, does nothing.
    """
    if not hasattr(cls, '__abstractmethods__'):
        # We check for __abstractmethods__ here because cls might be a C
        # implementation or a python implementation (especially during
        # testing), and we want to handle both cases.
        return cls

    abstracts = set()
    # Check the existing abstract methods of the parents, keep only the ones
    # that are not implemented.
    for scls in cls.__bases__:
        for name in getattr(scls, '__abstractmethods__', ()):
            value = getattr(cls, name, None)
            if getattr(value, "__isabstractmethod__", False):
                abstracts.add(name)
    # Also add any other newly added abstract methods.
    for name, value in cls.__dict__.items():
        if getattr(value, "__isabstractmethod__", False):
            abstracts.add(name)
    cls.__abstractmethods__ = frozenset(abstracts)
    return cls


class ABC(metaclass=ABCMeta):
    """Helper class that provides a standard way to create an ABC using
    inheritance.
    """
    __slots__ = ()
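A minimal usage sketch of the API this file provides; the Serializer, JSONSerializer, and Legacy names are hypothetical:

import abc
import json

class Serializer(abc.ABC):
    @abc.abstractmethod
    def dumps(self, obj):
        ...

class JSONSerializer(Serializer):
    def dumps(self, obj):          # implementing dumps() makes it instantiable
        return json.dumps(obj)

print(JSONSerializer().dumps({'a': 1}))  # {"a": 1}

@Serializer.register               # a virtual subclass: no inheritance needed
class Legacy:
    def dumps(self, obj):
        return str(obj)

print(issubclass(Legacy, Serializer))    # True, via __subclasscheck__ above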
Python310/Lib/argparse.py (new file, 2583 lines; diff suppressed because it is too large)

Python310/Lib/ast.py (new file, 1701 lines; diff suppressed because it is too large)

Python310/Lib/asynchat.py (new file, 315 lines)
@@ -0,0 +1,315 @@
# -*- Mode: Python; tab-width: 4 -*-
#       Id: asynchat.py,v 2.26 2000/09/07 22:29:26 rushing Exp
#       Author: Sam Rushing <rushing@nightmare.com>

# ======================================================================
# Copyright 1996 by Sam Rushing
#
#                         All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Sam
# Rushing not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================

r"""A class supporting chat-style (command/response) protocols.

This class adds support for 'chat' style protocols - where one side
sends a 'command', and the other sends a response (examples would be
the common internet protocols - smtp, nntp, ftp, etc..).

The handle_read() method looks at the input stream for the current
'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n'
for multi-line output), calling self.found_terminator() on its
receipt.

for example:
Say you build an async nntp client using this class.  At the start
of the connection, you'll have self.terminator set to '\r\n', in
order to process the single-line greeting.  Just before issuing a
'LIST' command you'll set it to '\r\n.\r\n'.  The output of the LIST
command will be accumulated (using your own 'collect_incoming_data'
method) up to the terminator, and then control will be returned to
you - by calling your self.found_terminator() method.
"""
import asyncore
from collections import deque

from warnings import warn
warn(
    'The asynchat module is deprecated and will be removed in Python 3.12. '
    'The recommended replacement is asyncio',
    DeprecationWarning,
    stacklevel=2)


class async_chat(asyncore.dispatcher):
    """This is an abstract class.  You must derive from this class, and add
    the two methods collect_incoming_data() and found_terminator()"""

    # these are overridable defaults

    ac_in_buffer_size = 65536
    ac_out_buffer_size = 65536

    # we don't want to enable the use of encoding by default, because that is a
    # sign of an application bug that we don't want to pass silently

    use_encoding = 0
    encoding = 'latin-1'

    def __init__(self, sock=None, map=None):
        # for string terminator matching
        self.ac_in_buffer = b''

        # we use a list here rather than io.BytesIO for a few reasons...
        # del lst[:] is faster than bio.truncate(0)
        # lst = [] is faster than bio.truncate(0)
        self.incoming = []

        # we toss the use of the "simple producer" and replace it with
        # a pure deque, which the original fifo was a wrapping of
        self.producer_fifo = deque()
        asyncore.dispatcher.__init__(self, sock, map)

    def collect_incoming_data(self, data):
        raise NotImplementedError("must be implemented in subclass")

    def _collect_incoming_data(self, data):
        self.incoming.append(data)

    def _get_data(self):
        d = b''.join(self.incoming)
        del self.incoming[:]
        return d

    def found_terminator(self):
        raise NotImplementedError("must be implemented in subclass")

    def set_terminator(self, term):
        """Set the input delimiter.

        Can be a fixed string of any length, an integer, or None.
        """
        if isinstance(term, str) and self.use_encoding:
            term = bytes(term, self.encoding)
        elif isinstance(term, int) and term < 0:
            raise ValueError('the number of received bytes must be positive')
        self.terminator = term

    def get_terminator(self):
        return self.terminator

    # grab some more data from the socket,
    # throw it to the collector method,
    # check for the terminator,
    # if found, transition to the next state.

    def handle_read(self):

        try:
            data = self.recv(self.ac_in_buffer_size)
        except BlockingIOError:
            return
        except OSError:
            self.handle_error()
            return

        if isinstance(data, str) and self.use_encoding:
            data = bytes(data, self.encoding)
        self.ac_in_buffer = self.ac_in_buffer + data

        # Continue to search for self.terminator in self.ac_in_buffer,
        # while calling self.collect_incoming_data.  The while loop
        # is necessary because we might read several data+terminator
        # combos with a single recv(4096).

        while self.ac_in_buffer:
            lb = len(self.ac_in_buffer)
            terminator = self.get_terminator()
            if not terminator:
                # no terminator, collect it all
                self.collect_incoming_data(self.ac_in_buffer)
                self.ac_in_buffer = b''
            elif isinstance(terminator, int):
                # numeric terminator
                n = terminator
                if lb < n:
                    self.collect_incoming_data(self.ac_in_buffer)
                    self.ac_in_buffer = b''
                    self.terminator = self.terminator - lb
                else:
                    self.collect_incoming_data(self.ac_in_buffer[:n])
                    self.ac_in_buffer = self.ac_in_buffer[n:]
                    self.terminator = 0
                    self.found_terminator()
            else:
                # 3 cases:
                # 1) end of buffer matches terminator exactly:
                #    collect data, transition
                # 2) end of buffer matches some prefix:
                #    collect data to the prefix
                # 3) end of buffer does not match any prefix:
                #    collect data
                terminator_len = len(terminator)
                index = self.ac_in_buffer.find(terminator)
                if index != -1:
                    # we found the terminator
                    if index > 0:
                        # don't bother reporting the empty string
                        # (source of subtle bugs)
                        self.collect_incoming_data(self.ac_in_buffer[:index])
                    self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
                    # This does the Right Thing if the terminator
                    # is changed here.
                    self.found_terminator()
                else:
                    # check for a prefix of the terminator
                    index = find_prefix_at_end(self.ac_in_buffer, terminator)
                    if index:
                        if index != lb:
                            # we found a prefix, collect up to the prefix
                            self.collect_incoming_data(self.ac_in_buffer[:-index])
                            self.ac_in_buffer = self.ac_in_buffer[-index:]
                        break
                    else:
                        # no prefix, collect it all
                        self.collect_incoming_data(self.ac_in_buffer)
                        self.ac_in_buffer = b''

    def handle_write(self):
        self.initiate_send()

    def handle_close(self):
        self.close()

    def push(self, data):
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError('data argument must be byte-ish (%r)'
                            % type(data))
        sabs = self.ac_out_buffer_size
        if len(data) > sabs:
            for i in range(0, len(data), sabs):
                self.producer_fifo.append(data[i:i+sabs])
        else:
            self.producer_fifo.append(data)
        self.initiate_send()

    def push_with_producer(self, producer):
        self.producer_fifo.append(producer)
        self.initiate_send()

    def readable(self):
        "predicate for inclusion in the readable for select()"
        # cannot use the old predicate, it violates the claim of the
        # set_terminator method.

        # return (len(self.ac_in_buffer) <= self.ac_in_buffer_size)
        return 1

    def writable(self):
        "predicate for inclusion in the writable for select()"
        return self.producer_fifo or (not self.connected)

    def close_when_done(self):
        "automatically close this channel once the outgoing queue is empty"
        self.producer_fifo.append(None)

    def initiate_send(self):
        while self.producer_fifo and self.connected:
            first = self.producer_fifo[0]
            # handle empty string/buffer or None entry
            if not first:
                del self.producer_fifo[0]
                if first is None:
                    self.handle_close()
                    return

            # handle classic producer behavior
            obs = self.ac_out_buffer_size
            try:
                data = first[:obs]
            except TypeError:
                data = first.more()
                if data:
                    self.producer_fifo.appendleft(data)
                else:
                    del self.producer_fifo[0]
                continue

            if isinstance(data, str) and self.use_encoding:
                data = bytes(data, self.encoding)

            # send the data
            try:
                num_sent = self.send(data)
            except OSError:
                self.handle_error()
                return

            if num_sent:
                if num_sent < len(data) or obs < len(first):
                    self.producer_fifo[0] = first[num_sent:]
                else:
                    del self.producer_fifo[0]
            # we tried to send some actual data
            return

    def discard_buffers(self):
        # Emergencies only!
        self.ac_in_buffer = b''
        del self.incoming[:]
        self.producer_fifo.clear()


class simple_producer:

    def __init__(self, data, buffer_size=512):
        self.data = data
        self.buffer_size = buffer_size

    def more(self):
        if len(self.data) > self.buffer_size:
            result = self.data[:self.buffer_size]
            self.data = self.data[self.buffer_size:]
            return result
        else:
            result = self.data
            self.data = b''
            return result


# Given 'haystack', see if any prefix of 'needle' is at its end.  This
# assumes an exact match has already been checked.  Return the number of
# characters matched.
# for example:
# f_p_a_e("qwerty\r", "\r\n") => 1
# f_p_a_e("qwertydkjf", "\r\n") => 0
# f_p_a_e("qwerty\r\n", "\r\n") => <undefined>

# this could maybe be made faster with a computed regex?
# [answer: no; circa Python-2.0, Jan 2001]
# new python:   28961/s
# old python:   18307/s
#        re:    12820/s
#     regex:    14035/s

def find_prefix_at_end(haystack, needle):
    l = len(needle) - 1
    while l and not haystack.endswith(needle[:l]):
        l -= 1
    return l
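A minimal subclass sketch, assuming an already-connected socket `sock` and an asyncore.loop() driving it; LineHandler is a hypothetical name. It shows the two required overrides plus the internal _collect_incoming_data()/_get_data() buffer helpers:

import asynchat  # importing emits the DeprecationWarning above

class LineHandler(asynchat.async_chat):
    """Echo back each CRLF-terminated line."""
    def __init__(self, sock):
        super().__init__(sock)
        self.set_terminator(b'\r\n')

    def collect_incoming_data(self, data):
        self._collect_incoming_data(data)  # append the chunk to self.incoming

    def found_terminator(self):
        line = self._get_data()            # join and clear self.incoming
        self.push(b'echo: ' + line + b'\r\n')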
Python310/Lib/asyncio/__init__.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""The asyncio package, tracking PEP 3156."""

# flake8: noqa

import sys

# This relies on each of the submodules having an __all__ variable.
from .base_events import *
from .coroutines import *
from .events import *
from .exceptions import *
from .futures import *
from .locks import *
from .protocols import *
from .runners import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .threads import *
from .transports import *

__all__ = (base_events.__all__ +
           coroutines.__all__ +
           events.__all__ +
           exceptions.__all__ +
           futures.__all__ +
           locks.__all__ +
           protocols.__all__ +
           runners.__all__ +
           queues.__all__ +
           streams.__all__ +
           subprocess.__all__ +
           tasks.__all__ +
           threads.__all__ +
           transports.__all__)

if sys.platform == 'win32':  # pragma: no cover
    from .windows_events import *
    __all__ += windows_events.__all__
else:
    from .unix_events import *  # pragma: no cover
    __all__ += unix_events.__all__
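Because every submodule is star-imported here, the whole package surface is reachable from the top-level name; a minimal sketch:

import asyncio

async def main():
    await asyncio.sleep(0.1)   # re-exported from asyncio.tasks via this __init__
    return 42

print(asyncio.run(main()))     # 42; asyncio.run comes from asyncio.runners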
Python310/Lib/asyncio/base_events.py (new file, 1921 lines; diff suppressed because it is too large)

Python310/Lib/asyncio/events.py (new file, 818 lines)
@@ -0,0 +1,818 @@
|
|||||||
|
"""Event loop and event loop policy."""
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
'AbstractEventLoopPolicy',
|
||||||
|
'AbstractEventLoop', 'AbstractServer',
|
||||||
|
'Handle', 'TimerHandle',
|
||||||
|
'get_event_loop_policy', 'set_event_loop_policy',
|
||||||
|
'get_event_loop', 'set_event_loop', 'new_event_loop',
|
||||||
|
'get_child_watcher', 'set_child_watcher',
|
||||||
|
'_set_running_loop', 'get_running_loop',
|
||||||
|
'_get_running_loop',
|
||||||
|
)
|
||||||
|
|
||||||
|
import contextvars
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from . import format_helpers
|
||||||
|
|
||||||
|
|
||||||
|
class Handle:
|
||||||
|
"""Object returned by callback registration methods."""
|
||||||
|
|
||||||
|
__slots__ = ('_callback', '_args', '_cancelled', '_loop',
|
||||||
|
'_source_traceback', '_repr', '__weakref__',
|
||||||
|
'_context')
|
||||||
|
|
||||||
|
def __init__(self, callback, args, loop, context=None):
|
||||||
|
if context is None:
|
||||||
|
context = contextvars.copy_context()
|
||||||
|
self._context = context
|
||||||
|
self._loop = loop
|
||||||
|
self._callback = callback
|
||||||
|
self._args = args
|
||||||
|
self._cancelled = False
|
||||||
|
self._repr = None
|
||||||
|
if self._loop.get_debug():
|
||||||
|
self._source_traceback = format_helpers.extract_stack(
|
||||||
|
sys._getframe(1))
|
||||||
|
else:
|
||||||
|
self._source_traceback = None
|
||||||
|
|
||||||
|
def _repr_info(self):
|
||||||
|
info = [self.__class__.__name__]
|
||||||
|
if self._cancelled:
|
||||||
|
info.append('cancelled')
|
||||||
|
if self._callback is not None:
|
||||||
|
info.append(format_helpers._format_callback_source(
|
||||||
|
self._callback, self._args))
|
||||||
|
if self._source_traceback:
|
||||||
|
frame = self._source_traceback[-1]
|
||||||
|
info.append(f'created at {frame[0]}:{frame[1]}')
|
||||||
|
return info
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
if self._repr is not None:
|
||||||
|
return self._repr
|
||||||
|
info = self._repr_info()
|
||||||
|
return '<{}>'.format(' '.join(info))
|
||||||
|
|
||||||
|
def cancel(self):
|
||||||
|
if not self._cancelled:
|
||||||
|
self._cancelled = True
|
||||||
|
if self._loop.get_debug():
|
||||||
|
# Keep a representation in debug mode to keep callback and
|
||||||
|
# parameters. For example, to log the warning
|
||||||
|
# "Executing <Handle...> took 2.5 second"
|
||||||
|
self._repr = repr(self)
|
||||||
|
self._callback = None
|
||||||
|
self._args = None
|
||||||
|
|
||||||
|
def cancelled(self):
|
||||||
|
return self._cancelled
|
||||||
|
|
||||||
|
def _run(self):
|
||||||
|
try:
|
||||||
|
self._context.run(self._callback, *self._args)
|
||||||
|
except (SystemExit, KeyboardInterrupt):
|
||||||
|
raise
|
||||||
|
except BaseException as exc:
|
||||||
|
cb = format_helpers._format_callback_source(
|
||||||
|
self._callback, self._args)
|
||||||
|
msg = f'Exception in callback {cb}'
|
||||||
|
context = {
|
||||||
|
'message': msg,
|
||||||
|
'exception': exc,
|
||||||
|
'handle': self,
|
||||||
|
}
|
||||||
|
if self._source_traceback:
|
||||||
|
context['source_traceback'] = self._source_traceback
|
||||||
|
self._loop.call_exception_handler(context)
|
||||||
|
self = None # Needed to break cycles when an exception occurs.
|
||||||
|
|
||||||
|
|
||||||
|
class TimerHandle(Handle):
|
||||||
|
"""Object returned by timed callback registration methods."""
|
||||||
|
|
||||||
|
__slots__ = ['_scheduled', '_when']
|
||||||
|
|
||||||
|
def __init__(self, when, callback, args, loop, context=None):
|
||||||
|
assert when is not None
|
||||||
|
super().__init__(callback, args, loop, context)
|
||||||
|
if self._source_traceback:
|
||||||
|
del self._source_traceback[-1]
|
||||||
|
self._when = when
|
||||||
|
self._scheduled = False
|
||||||
|
|
||||||
|
def _repr_info(self):
|
||||||
|
info = super()._repr_info()
|
||||||
|
pos = 2 if self._cancelled else 1
|
||||||
|
info.insert(pos, f'when={self._when}')
|
||||||
|
return info
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash(self._when)
|
||||||
|
|
||||||
|
def __lt__(self, other):
|
||||||
|
if isinstance(other, TimerHandle):
|
||||||
|
return self._when < other._when
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
def __le__(self, other):
|
||||||
|
if isinstance(other, TimerHandle):
|
||||||
|
return self._when < other._when or self.__eq__(other)
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
def __gt__(self, other):
|
||||||
|
if isinstance(other, TimerHandle):
|
||||||
|
return self._when > other._when
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
def __ge__(self, other):
|
||||||
|
if isinstance(other, TimerHandle):
|
||||||
|
return self._when > other._when or self.__eq__(other)
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if isinstance(other, TimerHandle):
|
||||||
|
return (self._when == other._when and
|
||||||
|
self._callback == other._callback and
|
||||||
|
self._args == other._args and
|
||||||
|
self._cancelled == other._cancelled)
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
def cancel(self):
|
||||||
|
if not self._cancelled:
|
||||||
|
self._loop._timer_handle_cancelled(self)
|
||||||
|
super().cancel()
|
||||||
|
|
||||||
|
def when(self):
|
||||||
|
"""Return a scheduled callback time.
|
||||||
|
|
||||||
|
The time is an absolute timestamp, using the same time
|
||||||
|
reference as loop.time().
|
||||||
|
"""
|
||||||
|
return self._when
|
||||||
|
|
||||||
|
|
||||||
|
class AbstractServer:
|
||||||
|
"""Abstract server returned by create_server()."""
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Stop serving. This leaves existing connections open."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def get_loop(self):
|
||||||
|
"""Get the event loop the Server object is attached to."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def is_serving(self):
|
||||||
|
"""Return True if the server is accepting connections."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def start_serving(self):
|
||||||
|
"""Start accepting connections.
|
||||||
|
|
||||||
|
This method is idempotent, so it can be called when
|
||||||
|
the server is already being serving.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def serve_forever(self):
|
||||||
|
"""Start accepting connections until the coroutine is cancelled.
|
||||||
|
|
||||||
|
The server is closed when the coroutine is cancelled.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def wait_closed(self):
|
||||||
|
"""Coroutine to wait until service is closed."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, *exc):
|
||||||
|
self.close()
|
||||||
|
await self.wait_closed()
|
||||||
|
|
||||||
|
|
||||||
|
class AbstractEventLoop:
|
||||||
|
"""Abstract event loop."""
|
||||||
|
|
||||||
|
# Running and stopping the event loop.
|
||||||
|
|
||||||
|
def run_forever(self):
|
||||||
|
"""Run the event loop until stop() is called."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def run_until_complete(self, future):
|
||||||
|
"""Run the event loop until a Future is done.
|
||||||
|
|
||||||
|
Return the Future's result, or raise its exception.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
"""Stop the event loop as soon as reasonable.
|
||||||
|
|
||||||
|
Exactly how soon that is may depend on the implementation, but
|
||||||
|
no more I/O callbacks should be scheduled.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def is_running(self):
|
||||||
|
"""Return whether the event loop is currently running."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def is_closed(self):
|
||||||
|
"""Returns True if the event loop was closed."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Close the loop.
|
||||||
|
|
||||||
|
The loop should not be running.
|
||||||
|
|
||||||
|
This is idempotent and irreversible.
|
||||||
|
|
||||||
|
No other methods should be called after this one.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def shutdown_asyncgens(self):
|
||||||
|
"""Shutdown all active asynchronous generators."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def shutdown_default_executor(self):
|
||||||
|
"""Schedule the shutdown of the default executor."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Methods scheduling callbacks. All these return Handles.
|
||||||
|
|
||||||
|
def _timer_handle_cancelled(self, handle):
|
||||||
|
"""Notification that a TimerHandle has been cancelled."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def call_soon(self, callback, *args, context=None):
|
||||||
|
return self.call_later(0, callback, *args, context=context)
|
||||||
|
|
||||||
|
def call_later(self, delay, callback, *args, context=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def call_at(self, when, callback, *args, context=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def time(self):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def create_future(self):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Method scheduling a coroutine object: create a task.
|
||||||
|
|
||||||
|
def create_task(self, coro, *, name=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Methods for interacting with threads.
|
||||||
|
|
||||||
|
def call_soon_threadsafe(self, callback, *args, context=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def run_in_executor(self, executor, func, *args):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def set_default_executor(self, executor):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Network I/O methods returning Futures.
|
||||||
|
|
||||||
|
async def getaddrinfo(self, host, port, *,
|
||||||
|
family=0, type=0, proto=0, flags=0):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def getnameinfo(self, sockaddr, flags=0):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def create_connection(
|
||||||
|
self, protocol_factory, host=None, port=None,
|
||||||
|
*, ssl=None, family=0, proto=0,
|
||||||
|
flags=0, sock=None, local_addr=None,
|
||||||
|
server_hostname=None,
|
||||||
|
ssl_handshake_timeout=None,
|
||||||
|
happy_eyeballs_delay=None, interleave=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def create_server(
|
||||||
|
self, protocol_factory, host=None, port=None,
|
||||||
|
*, family=socket.AF_UNSPEC,
|
||||||
|
flags=socket.AI_PASSIVE, sock=None, backlog=100,
|
||||||
|
ssl=None, reuse_address=None, reuse_port=None,
|
||||||
|
ssl_handshake_timeout=None,
|
||||||
|
start_serving=True):
|
||||||
|
"""A coroutine which creates a TCP server bound to host and port.
|
||||||
|
|
||||||
|
The return value is a Server object which can be used to stop
|
||||||
|
the service.
|
||||||
|
|
||||||
|
If host is an empty string or None all interfaces are assumed
|
||||||
|
and a list of multiple sockets will be returned (most likely
|
||||||
|
one for IPv4 and another one for IPv6). The host parameter can also be
|
||||||
|
a sequence (e.g. list) of hosts to bind to.
|
||||||
|
|
||||||
|
family can be set to either AF_INET or AF_INET6 to force the
|
||||||
|
socket to use IPv4 or IPv6. If not set it will be determined
|
||||||
|
from host (defaults to AF_UNSPEC).
|
||||||
|
|
||||||
|
flags is a bitmask for getaddrinfo().
|
||||||
|
|
||||||
|
sock can optionally be specified in order to use a preexisting
|
||||||
|
socket object.
|
||||||
|
|
||||||
|
backlog is the maximum number of queued connections passed to
|
||||||
|
listen() (defaults to 100).
|
||||||
|
|
||||||
|
ssl can be set to an SSLContext to enable SSL over the
|
||||||
|
accepted connections.
|
||||||
|
|
||||||
|
reuse_address tells the kernel to reuse a local socket in
|
||||||
|
TIME_WAIT state, without waiting for its natural timeout to
|
||||||
|
expire. If not specified will automatically be set to True on
|
||||||
|
UNIX.
|
||||||
|
|
||||||
|
reuse_port tells the kernel to allow this endpoint to be bound to
|
||||||
|
the same port as other existing endpoints are bound to, so long as
|
||||||
|
they all set this flag when being created. This option is not
|
||||||
|
supported on Windows.
|
||||||
|
|
||||||
|
ssl_handshake_timeout is the time in seconds that an SSL server
|
||||||
|
will wait for completion of the SSL handshake before aborting the
|
||||||
|
connection. Default is 60s.
|
||||||
|
|
||||||
|
start_serving set to True (default) causes the created server
|
||||||
|
to start accepting connections immediately. When set to False,
|
||||||
|
the user should await Server.start_serving() or Server.serve_forever()
|
||||||
|
to make the server to start accepting connections.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def sendfile(self, transport, file, offset=0, count=None,
|
||||||
|
*, fallback=True):
|
||||||
|
"""Send a file through a transport.
|
||||||
|
|
||||||
|
Return an amount of sent bytes.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def start_tls(self, transport, protocol, sslcontext, *,
|
||||||
|
server_side=False,
|
||||||
|
server_hostname=None,
|
||||||
|
ssl_handshake_timeout=None):
|
||||||
|
"""Upgrade a transport to TLS.
|
||||||
|
|
||||||
|
Return a new transport that *protocol* should start using
|
||||||
|
immediately.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def create_unix_connection(
|
||||||
|
self, protocol_factory, path=None, *,
|
||||||
|
ssl=None, sock=None,
|
||||||
|
server_hostname=None,
|
||||||
|
ssl_handshake_timeout=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def create_unix_server(
|
||||||
|
self, protocol_factory, path=None, *,
|
||||||
|
sock=None, backlog=100, ssl=None,
|
||||||
|
ssl_handshake_timeout=None,
|
||||||
|
start_serving=True):
|
||||||
|
"""A coroutine which creates a UNIX Domain Socket server.
|
||||||
|
|
||||||
|
The return value is a Server object, which can be used to stop
|
||||||
|
the service.
|
||||||
|
|
||||||
|
path is a str, representing a file system path to bind the
|
||||||
|
server socket to.
|
||||||
|
|
||||||
|
sock can optionally be specified in order to use a preexisting
|
||||||
|
socket object.
|
||||||
|
|
||||||
|
backlog is the maximum number of queued connections passed to
|
||||||
|
listen() (defaults to 100).
|
||||||
|
|
||||||
|
ssl can be set to an SSLContext to enable SSL over the
|
||||||
|
accepted connections.
|
||||||
|
|
||||||
|
ssl_handshake_timeout is the time in seconds that an SSL server
|
||||||
|
will wait for the SSL handshake to complete (defaults to 60s).
|
||||||
|
|
||||||
|
start_serving set to True (default) causes the created server
|
||||||
|
to start accepting connections immediately. When set to False,
|
||||||
|
the user should await Server.start_serving() or Server.serve_forever()
|
||||||
|
to make the server to start accepting connections.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def connect_accepted_socket(
|
||||||
|
self, protocol_factory, sock,
|
||||||
|
*, ssl=None,
|
||||||
|
ssl_handshake_timeout=None):
|
||||||
|
"""Handle an accepted connection.
|
||||||
|
|
||||||
|
This is used by servers that accept connections outside of
|
||||||
|
asyncio, but use asyncio to handle connections.
|
||||||
|
|
||||||
|
This method is a coroutine. When completed, the coroutine
|
||||||
|
returns a (transport, protocol) pair.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def create_datagram_endpoint(self, protocol_factory,
|
||||||
|
local_addr=None, remote_addr=None, *,
|
||||||
|
family=0, proto=0, flags=0,
|
||||||
|
reuse_address=None, reuse_port=None,
|
||||||
|
allow_broadcast=None, sock=None):
|
||||||
|
"""A coroutine which creates a datagram endpoint.
|
||||||
|
|
||||||
|
This method will try to establish the endpoint in the background.
|
||||||
|
When successful, the coroutine returns a (transport, protocol) pair.
|
||||||
|
|
||||||
|
protocol_factory must be a callable returning a protocol instance.
|
||||||
|
|
||||||
|
socket family AF_INET, socket.AF_INET6 or socket.AF_UNIX depending on
|
||||||
|
host (or family if specified), socket type SOCK_DGRAM.
|
||||||
|
|
||||||
|
reuse_address tells the kernel to reuse a local socket in
|
||||||
|
TIME_WAIT state, without waiting for its natural timeout to
|
||||||
|
expire. If not specified it will automatically be set to True on
|
||||||
|
UNIX.
|
||||||
|
|
||||||
|
reuse_port tells the kernel to allow this endpoint to be bound to
|
||||||
|
the same port as other existing endpoints are bound to, so long as
|
||||||
|
they all set this flag when being created. This option is not
|
||||||
|
supported on Windows and some UNIX's. If the
|
||||||
|
:py:data:`~socket.SO_REUSEPORT` constant is not defined then this
|
||||||
|
capability is unsupported.
|
||||||
|
|
||||||
|
allow_broadcast tells the kernel to allow this endpoint to send
|
||||||
|
messages to the broadcast address.
|
||||||
|
|
||||||
|
sock can optionally be specified in order to use a preexisting
|
||||||
|
socket object.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Pipes and subprocesses.
|
||||||
|
|
||||||
|
async def connect_read_pipe(self, protocol_factory, pipe):
|
||||||
|
"""Register read pipe in event loop. Set the pipe to non-blocking mode.
|
||||||
|
|
||||||
|
protocol_factory should instantiate object with Protocol interface.
|
||||||
|
pipe is a file-like object.
|
||||||
|
Return pair (transport, protocol), where transport supports the
|
||||||
|
ReadTransport interface."""
|
||||||
|
# The reason to accept file-like object instead of just file descriptor
|
||||||
|
# is: we need to own pipe and close it at transport finishing
|
||||||
|
# Can got complicated errors if pass f.fileno(),
|
||||||
|
# close fd in pipe transport then close f and vice versa.
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def connect_write_pipe(self, protocol_factory, pipe):
|
||||||
|
"""Register write pipe in event loop.
|
||||||
|
|
||||||
|
protocol_factory should instantiate object with BaseProtocol interface.
|
||||||
|
Pipe is file-like object already switched to nonblocking.
|
||||||
|
Return pair (transport, protocol), where transport support
|
||||||
|
WriteTransport interface."""
|
||||||
|
# The reason to accept file-like object instead of just file descriptor
|
||||||
|
# is: we need to own pipe and close it at transport finishing
|
||||||
|
# Can got complicated errors if pass f.fileno(),
|
||||||
|
# close fd in pipe transport then close f and vice versa.
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def subprocess_shell(self, protocol_factory, cmd, *,
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
**kwargs):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def subprocess_exec(self, protocol_factory, *args,
|
||||||
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
**kwargs):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Ready-based callback registration methods.
|
||||||
|
# The add_*() methods return None.
|
||||||
|
# The remove_*() methods return True if something was removed,
|
||||||
|
# False if there was nothing to delete.
|
||||||
|
|
||||||
|
def add_reader(self, fd, callback, *args):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def remove_reader(self, fd):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def add_writer(self, fd, callback, *args):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def remove_writer(self, fd):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Completion based I/O methods returning Futures.
|
||||||
|
|
||||||
|
async def sock_recv(self, sock, nbytes):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def sock_recv_into(self, sock, buf):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def sock_sendall(self, sock, data):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def sock_connect(self, sock, address):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def sock_accept(self, sock):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def sock_sendfile(self, sock, file, offset=0, count=None,
|
||||||
|
*, fallback=None):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Signal handling.
|
||||||
|
|
||||||
|
def add_signal_handler(self, sig, callback, *args):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def remove_signal_handler(self, sig):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Task factory.
|
||||||
|
|
||||||
|
def set_task_factory(self, factory):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def get_task_factory(self):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Error handlers.
|
||||||
|
|
||||||
|
def get_exception_handler(self):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def set_exception_handler(self, handler):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def default_exception_handler(self, context):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def call_exception_handler(self, context):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Debug flag management.
|
||||||
|
|
||||||
|
def get_debug(self):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def set_debug(self, enabled):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class AbstractEventLoopPolicy:
|
||||||
|
"""Abstract policy for accessing the event loop."""
|
||||||
|
|
||||||
|
def get_event_loop(self):
|
||||||
|
"""Get the event loop for the current context.
|
||||||
|
|
||||||
|
Returns an event loop object implementing the BaseEventLoop interface,
|
||||||
|
or raises an exception in case no event loop has been set for the
|
||||||
|
current context and the current policy does not specify to create one.
|
||||||
|
|
||||||
|
It should never return None."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def set_event_loop(self, loop):
|
||||||
|
"""Set the event loop for the current context to loop."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def new_event_loop(self):
|
||||||
|
"""Create and return a new event loop object according to this
|
||||||
|
policy's rules. If there's need to set this loop as the event loop for
|
||||||
|
the current context, set_event_loop must be called explicitly."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Child processes handling (Unix only).
|
||||||
|
|
||||||
|
def get_child_watcher(self):
|
||||||
|
"Get the watcher for child processes."
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def set_child_watcher(self, watcher):
|
||||||
|
"""Set the watcher for child processes."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|

class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
    """Default policy implementation for accessing the event loop.

    In this policy, each thread has its own event loop. However, we
    only automatically create an event loop by default for the main
    thread; other threads by default have no event loop.

    Other policies may have different rules (e.g. a single global
    event loop, or automatically creating an event loop per thread, or
    using some other notion of context to which an event loop is
    associated).
    """

    _loop_factory = None

    class _Local(threading.local):
        _loop = None
        _set_called = False

    def __init__(self):
        self._local = self._Local()

    def get_event_loop(self):
        """Get the event loop for the current context.

        Returns an instance of EventLoop or raises an exception.
        """
        if (self._local._loop is None and
                not self._local._set_called and
                threading.current_thread() is threading.main_thread()):
            self.set_event_loop(self.new_event_loop())

        if self._local._loop is None:
            raise RuntimeError('There is no current event loop in thread %r.'
                               % threading.current_thread().name)

        return self._local._loop

    def set_event_loop(self, loop):
        """Set the event loop."""
        self._local._set_called = True
        assert loop is None or isinstance(loop, AbstractEventLoop)
        self._local._loop = loop

    def new_event_loop(self):
        """Create a new event loop.

        You must call set_event_loop() to make this the current event
        loop.
        """
        return self._loop_factory()
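
# --- Illustrative sketch, not part of the original module ---
# The default policy's get_event_loop() auto-creates a loop only in the main
# thread; in any other thread it raises RuntimeError unless a loop was set
# explicitly with new_event_loop()/set_event_loop().  A sketch:
def _demo_per_thread_loops():
    import asyncio
    import threading

    def worker():
        try:
            asyncio.get_event_loop_policy().get_event_loop()
        except RuntimeError:
            # Expected: non-main threads get no loop automatically.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            loop.close()

    t = threading.Thread(target=worker)
    t.start()
    t.join()
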

# Event loop policy.  The policy itself is always global, even if the
# policy's rules say that there is an event loop per thread (or other
# notion of context).  The default policy is installed by the first
# call to get_event_loop_policy().
_event_loop_policy = None

# Lock for protecting the on-the-fly creation of the event loop policy.
_lock = threading.Lock()


# A TLS for the running event loop, used by _get_running_loop.
class _RunningLoop(threading.local):
    loop_pid = (None, None)


_running_loop = _RunningLoop()


def get_running_loop():
    """Return the running event loop.  Raise a RuntimeError if there is none.

    This function is thread-specific.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    loop = _get_running_loop()
    if loop is None:
        raise RuntimeError('no running event loop')
    return loop


def _get_running_loop():
    """Return the running event loop or None.

    This is a low-level function intended to be used by event loops.
    This function is thread-specific.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    running_loop, pid = _running_loop.loop_pid
    if running_loop is not None and pid == os.getpid():
        return running_loop


def _set_running_loop(loop):
    """Set the running event loop.

    This is a low-level function intended to be used by event loops.
    This function is thread-specific.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    _running_loop.loop_pid = (loop, os.getpid())
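
# --- Illustrative sketch, not part of the original module ---
# get_running_loop() only succeeds while a loop is actually running; the
# (loop, pid) pair stored above appears intended to keep a loop inherited
# across os.fork() from leaking into the child process (the PID comparison
# in _get_running_loop() fails there).  A sketch of the two cases:
def _demo_running_loop():
    import asyncio

    async def main():
        # Inside a coroutine the running loop is available.
        return asyncio.get_running_loop()

    loop = asyncio.run(main())          # returns the loop that ran main()
    try:
        asyncio.get_running_loop()      # no loop is running here...
    except RuntimeError:
        pass                            # ...so this raises, as documented
    return loop
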

def _init_event_loop_policy():
    global _event_loop_policy
    with _lock:
        if _event_loop_policy is None:  # pragma: no branch
            from . import DefaultEventLoopPolicy
            _event_loop_policy = DefaultEventLoopPolicy()


def get_event_loop_policy():
    """Get the current event loop policy."""
    if _event_loop_policy is None:
        _init_event_loop_policy()
    return _event_loop_policy


def set_event_loop_policy(policy):
    """Set the current event loop policy.

    If policy is None, the default policy is restored."""
    global _event_loop_policy
    assert policy is None or isinstance(policy, AbstractEventLoopPolicy)
    _event_loop_policy = policy
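
# --- Illustrative sketch, not part of the original module ---
# set_event_loop_policy() swaps the process-global policy; passing None
# restores the default on the next get_event_loop_policy() call.
# `_DebugLoopPolicy` is a hypothetical name used only for illustration.
def _demo_swap_policy():
    import asyncio

    class _DebugLoopPolicy(asyncio.DefaultEventLoopPolicy):
        def new_event_loop(self):
            loop = super().new_event_loop()
            loop.set_debug(True)   # every new loop starts in debug mode
            return loop

    asyncio.set_event_loop_policy(_DebugLoopPolicy())
    loop = asyncio.new_event_loop()      # goes through the custom policy
    assert loop.get_debug()
    loop.close()
    asyncio.set_event_loop_policy(None)  # restore the default policy
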

def get_event_loop():
    """Return an asyncio event loop.

    When called from a coroutine or a callback (e.g. scheduled with call_soon
    or similar API), this function will always return the running event loop.

    If there is no running event loop set, the function will return
    the result of `get_event_loop_policy().get_event_loop()` call.
    """
    # NOTE: this function is implemented in C (see _asynciomodule.c)
    return _py__get_event_loop()


def _get_event_loop(stacklevel=3):
    current_loop = _get_running_loop()
    if current_loop is not None:
        return current_loop
    import warnings
    warnings.warn('There is no current event loop',
                  DeprecationWarning, stacklevel=stacklevel)
    return get_event_loop_policy().get_event_loop()
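
# --- Illustrative sketch, not part of the original module ---
# With a loop running, get_event_loop() is equivalent to get_running_loop();
# with none running it goes through _get_event_loop() above, which emits a
# DeprecationWarning before falling back to the policy (and the policy may
# itself raise RuntimeError if no loop is set for this thread).  A sketch:
def _demo_get_event_loop():
    import asyncio
    import warnings

    async def main():
        # With a loop running, both calls return the same loop object.
        assert asyncio.get_event_loop() is asyncio.get_running_loop()

    asyncio.run(main())

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        try:
            asyncio.get_event_loop()    # no running loop: deprecated fallback
        except RuntimeError:
            pass                        # run() above unset this thread's loop
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
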

def set_event_loop(loop):
    """Equivalent to calling get_event_loop_policy().set_event_loop(loop)."""
    get_event_loop_policy().set_event_loop(loop)


def new_event_loop():
    """Equivalent to calling get_event_loop_policy().new_event_loop()."""
    return get_event_loop_policy().new_event_loop()


def get_child_watcher():
    """Equivalent to calling get_event_loop_policy().get_child_watcher()."""
    return get_event_loop_policy().get_child_watcher()


def set_child_watcher(watcher):
    """Equivalent to calling
    get_event_loop_policy().set_child_watcher(watcher)."""
    return get_event_loop_policy().set_child_watcher(watcher)

# Alias pure-Python implementations for testing purposes.
_py__get_running_loop = _get_running_loop
_py__set_running_loop = _set_running_loop
_py_get_running_loop = get_running_loop
_py_get_event_loop = get_event_loop
_py__get_event_loop = _get_event_loop


try:
    # get_event_loop() is one of the most frequently called
    # functions in asyncio.  Pure Python implementation is
    # about 4 times slower than C-accelerated.
    from _asyncio import (_get_running_loop, _set_running_loop,
                          get_running_loop, get_event_loop, _get_event_loop)
except ImportError:
    pass
else:
    # Alias C implementations for testing purposes.
    _c__get_running_loop = _get_running_loop
    _c__set_running_loop = _set_running_loop
    _c_get_running_loop = get_running_loop
    _c_get_event_loop = get_event_loop
    _c__get_event_loop = _get_event_loop
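
# --- Illustrative sketch, not part of the original module ---
# The _py_*/_c_* aliases above let the test suite exercise both the pure
# Python and the C-accelerated implementations.  A sketch of probing which
# one is live in the current interpreter build:
def _demo_which_implementation():
    import asyncio.events as events
    if hasattr(events, '_c_get_running_loop'):
        # _asyncio imported: the public names are the C versions.
        assert events.get_running_loop is events._c_get_running_loop
    else:
        # Build without the accelerator: the pure-Python versions are live.
        assert events.get_running_loop is events._py_get_running_loop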