diff --git a/.env b/.env deleted file mode 100644 index 220407e..0000000 --- a/.env +++ /dev/null @@ -1 +0,0 @@ -TOKEN=MTIxNjE0NDY1Nzg2MDI2NDAzNg.G582UU.B1j7X-hr_JKx_jrx4M9dt-c47PAHggt5_AweIo diff --git a/.venv/Lib/site-packages/_distutils_hack/__init__.py b/.venv/Lib/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index f987a53..0000000 --- a/.venv/Lib/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,222 +0,0 @@ -# don't import any costly modules -import sys -import os - - -is_pypy = '__pypy__' in sys.builtin_module_names - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return - import warnings - - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. not an editable install), and/or make sure " - "that setuptools is always imported before distutils." - ) - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - import warnings - - warnings.warn("Setuptools is replacing distutils.") - mods = [ - name - for name in sys.modules - if name == "distutils" or name.startswith("distutils.") - ] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') - return which == 'local' - - -def ensure_local_distutils(): - import importlib - - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - with shim(): - importlib.import_module('distutils') - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - assert 'setuptools._distutils.log' not in sys.modules - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class _TrivialRe: - def __init__(self, *patterns): - self._patterns = patterns - - def match(self, string): - return all(pat in string for pat in self._patterns) - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - # optimization: only consider top level modules and those - # found in the CPython test suite. - if path is not None and not fullname.startswith('test.'): - return - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - if self.is_cpython(): - return - - import importlib - import importlib.abc - import importlib.util - - try: - mod = importlib.import_module('setuptools._distutils') - except Exception: - # There are a couple of cases where setuptools._distutils - # may not be present: - # - An older Setuptools without a local distutils is - # taking precedence. Ref #2957. 
- # - Path manipulation during sitecustomize removes - # setuptools from the path but only after the hook - # has been loaded. Ref #2980. - # In either case, fall back to stdlib behavior. - return - - class DistutilsLoader(importlib.abc.Loader): - def create_module(self, spec): - mod.__name__ = 'distutils' - return mod - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader( - 'distutils', DistutilsLoader(), origin=mod.__file__ - ) - - @staticmethod - def is_cpython(): - """ - Suppress supplying distutils for CPython (build and tests). - Ref #2965 and #3007. - """ - return os.path.isfile('pybuilddir.txt') - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. - See pypa/pip#8761 for rationale. - """ - if self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @classmethod - def pip_imported_during_build(cls): - """ - Detect if pip is being imported in a build script. Ref #2355. - """ - import traceback - - return any( - cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) - ) - - @staticmethod - def frame_file_is_setup(frame): - """ - Return True if the indicated frame suggests a setup.py file. - """ - # some frames may not have __file__ (#2940) - return frame.f_globals.get('__file__', '').endswith('setup.py') - - def spec_for_sensitive_tests(self): - """ - Ensure stdlib distutils when running select tests under CPython. - - python/cpython#91169 - """ - clear_distutils() - self.spec_for_distutils = lambda: None - - sensitive_tests = ( - [ - 'test.test_distutils', - 'test.test_peg_generator', - 'test.test_importlib', - ] - if sys.version_info < (3, 10) - else [ - 'test.test_distutils', - ] - ) - - -for name in DistutilsMetaFinder.sensitive_tests: - setattr( - DistutilsMetaFinder, - f'spec_for_{name}', - DistutilsMetaFinder.spec_for_sensitive_tests, - ) - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - DISTUTILS_FINDER in sys.meta_path or insert_shim() - - -class shim: - def __enter__(self): - insert_shim() - - def __exit__(self, exc, value, tb): - remove_shim() - - -def insert_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass diff --git a/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index a19bf6b..0000000 Binary files a/.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc b/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc deleted file mode 100644 index 7b71491..0000000 Binary files a/.venv/Lib/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/_distutils_hack/override.py b/.venv/Lib/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/.venv/Lib/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/INSTALLER b/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff 
--git a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/LICENSE.txt b/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/LICENSE.txt deleted file mode 100644 index e497a32..0000000 --- a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/LICENSE.txt +++ /dev/null @@ -1,13 +0,0 @@ - Copyright aio-libs contributors. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/METADATA b/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/METADATA deleted file mode 100644 index 8db6d13..0000000 --- a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/METADATA +++ /dev/null @@ -1,243 +0,0 @@ -Metadata-Version: 2.1 -Name: aiohttp -Version: 3.9.3 -Summary: Async http client/server framework (asyncio) -Home-page: https://github.com/aio-libs/aiohttp -Maintainer: aiohttp team -Maintainer-email: team@aiohttp.org -License: Apache 2 -Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org -Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org -Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp -Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html -Project-URL: Docs: RTD, https://docs.aiohttp.org -Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp -Classifier: Development Status :: 5 - Production/Stable -Classifier: Framework :: AsyncIO -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE.txt -Requires-Dist: aiosignal >=1.1.2 -Requires-Dist: attrs >=17.3.0 -Requires-Dist: frozenlist >=1.1.1 -Requires-Dist: multidict <7.0,>=4.5 -Requires-Dist: yarl <2.0,>=1.0 -Requires-Dist: async-timeout <5.0,>=4.0 ; python_version < "3.11" -Provides-Extra: speedups -Requires-Dist: brotlicffi ; (platform_python_implementation != "CPython") and extra == 'speedups' -Requires-Dist: Brotli ; (platform_python_implementation == "CPython") and extra == 'speedups' -Requires-Dist: aiodns ; (sys_platform == "linux" or sys_platform == "darwin") and extra == 'speedups' - -================================== -Async http client/server framework -================================== - -.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg - :height: 64px - :width: 64px - :alt: aiohttp logo - -| - -.. 
image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg - :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI - :alt: GitHub Actions status for master branch - -.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/aiohttp - :alt: codecov.io status for master branch - -.. image:: https://badge.fury.io/py/aiohttp.svg - :target: https://pypi.org/project/aiohttp - :alt: Latest PyPI package version - -.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest - :target: https://docs.aiohttp.org/ - :alt: Latest Read The Docs - -.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - :target: https://matrix.to/#/%23aio-libs:matrix.org - :alt: Matrix Room — #aio-libs:matrix.org - -.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat - :target: https://matrix.to/#/%23aio-libs-space:matrix.org - :alt: Matrix Space — #aio-libs-space:matrix.org - - -Key Features -============ - -- Supports both client and server side of HTTP protocol. -- Supports both client and server Web-Sockets out-of-the-box and avoids - Callback Hell. -- Provides Web-server with middleware and pluggable routing. - - -Getting started -=============== - -Client ------- - -To get something from the web: - -.. code-block:: python - - import aiohttp - import asyncio - - async def main(): - - async with aiohttp.ClientSession() as session: - async with session.get('http://python.org') as response: - - print("Status:", response.status) - print("Content-type:", response.headers['content-type']) - - html = await response.text() - print("Body:", html[:15], "...") - - asyncio.run(main()) - -This prints: - -.. code-block:: - - Status: 200 - Content-type: text/html; charset=utf-8 - Body: ... - -Coming from `requests `_ ? Read `why we need so many lines `_. - -Server ------- - -An example using a simple server: - -.. code-block:: python - - # examples/server_simple.py - from aiohttp import web - - async def handle(request): - name = request.match_info.get('name', "Anonymous") - text = "Hello, " + name - return web.Response(text=text) - - async def wshandle(request): - ws = web.WebSocketResponse() - await ws.prepare(request) - - async for msg in ws: - if msg.type == web.WSMsgType.text: - await ws.send_str("Hello, {}".format(msg.data)) - elif msg.type == web.WSMsgType.binary: - await ws.send_bytes(msg.data) - elif msg.type == web.WSMsgType.close: - break - - return ws - - - app = web.Application() - app.add_routes([web.get('/', handle), - web.get('/echo', wshandle), - web.get('/{name}', handle)]) - - if __name__ == '__main__': - web.run_app(app) - - -Documentation -============= - -https://aiohttp.readthedocs.io/ - - -Demos -===== - -https://github.com/aio-libs/aiohttp-demos - - -External links -============== - -* `Third party libraries - `_ -* `Built with aiohttp - `_ -* `Powered by aiohttp - `_ - -Feel free to make a Pull Request for adding your link to these pages! - - -Communication channels -====================== - -*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions - -*gitter chat* https://gitter.im/aio-libs/Lobby - -We support `Stack Overflow -`_. -Please add *aiohttp* tag to your question there. 
- -Requirements -============ - -- async-timeout_ -- attrs_ -- multidict_ -- yarl_ -- frozenlist_ - -Optionally you may install the aiodns_ library (highly recommended for sake of speed). - -.. _aiodns: https://pypi.python.org/pypi/aiodns -.. _attrs: https://github.com/python-attrs/attrs -.. _multidict: https://pypi.python.org/pypi/multidict -.. _frozenlist: https://pypi.org/project/frozenlist/ -.. _yarl: https://pypi.python.org/pypi/yarl -.. _async-timeout: https://pypi.python.org/pypi/async_timeout - -License -======= - -``aiohttp`` is offered under the Apache 2 license. - - -Keepsafe -======== - -The aiohttp community would like to thank Keepsafe -(https://www.getkeepsafe.com) for its support in the early days of -the project. - - -Source code -=========== - -The latest developer version is available in a GitHub repository: -https://github.com/aio-libs/aiohttp - -Benchmarks -========== - -If you are interested in efficiency, the AsyncIO community maintains a -list of benchmarks on the official wiki: -https://github.com/python/asyncio/wiki/Benchmarks diff --git a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/RECORD b/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/RECORD deleted file mode 100644 index 714d072..0000000 --- a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/RECORD +++ /dev/null @@ -1,119 +0,0 @@ -aiohttp-3.9.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aiohttp-3.9.3.dist-info/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601 -aiohttp-3.9.3.dist-info/METADATA,sha256=bmYqabW6J4U7r7Eg_XyVQZ_l3lltajSs-rrdfsfsAnE,7600 -aiohttp-3.9.3.dist-info/RECORD,, -aiohttp-3.9.3.dist-info/WHEEL,sha256=ircjsfhzblqgSzO8ow7-0pXK-RVqDqNRGQ8F650AUNM,102 -aiohttp-3.9.3.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8 -aiohttp/.hash/_cparser.pxd.hash,sha256=dVGMrCmyJM_owqoRLPezK095md0X5R319koTuhUN6DQ,64 -aiohttp/.hash/_find_header.pxd.hash,sha256=W5qRPWDc55gArGZkriI5tztmQHkrdwR6NdQfRQfTxIg,64 -aiohttp/.hash/_helpers.pyi.hash,sha256=bAsxbXsjcZ5gbj1c561GYcRtQ5REXxrCihR-HN0XKPk,64 -aiohttp/.hash/_helpers.pyx.hash,sha256=-DfrN0XUqBhyb8bp2fJQVb1Lo9S1S-psob-7MJBM18c,64 -aiohttp/.hash/_http_parser.pyx.hash,sha256=8fpadR-Mg6RMlNNLGfIYRh56a6SedGbXY5x9A40ciRs,64 -aiohttp/.hash/_http_writer.pyx.hash,sha256=z39c0hUcdud-ZCon2d9bWpxrFMVdW1dvjtCgxW4RDnI,64 -aiohttp/.hash/_websocket.pyx.hash,sha256=90x5ulhWiFtw2wAri2_82Zas5i3iEkJ-flYJK9Xx-SY,64 -aiohttp/.hash/hdrs.py.hash,sha256=QBHPUkJcp8iPZv3ENUbevgpJzljxoP2qwkBeX3nQ82o,64 -aiohttp/__init__.py,sha256=abMZGeBfcDCjr4nPoKt3a1limp8Z7bP2ngpnxEtEKWU,8002 -aiohttp/__pycache__/__init__.cpython-311.pyc,, -aiohttp/__pycache__/abc.cpython-311.pyc,, -aiohttp/__pycache__/base_protocol.cpython-311.pyc,, -aiohttp/__pycache__/client.cpython-311.pyc,, -aiohttp/__pycache__/client_exceptions.cpython-311.pyc,, -aiohttp/__pycache__/client_proto.cpython-311.pyc,, -aiohttp/__pycache__/client_reqrep.cpython-311.pyc,, -aiohttp/__pycache__/client_ws.cpython-311.pyc,, -aiohttp/__pycache__/compression_utils.cpython-311.pyc,, -aiohttp/__pycache__/connector.cpython-311.pyc,, -aiohttp/__pycache__/cookiejar.cpython-311.pyc,, -aiohttp/__pycache__/formdata.cpython-311.pyc,, -aiohttp/__pycache__/hdrs.cpython-311.pyc,, -aiohttp/__pycache__/helpers.cpython-311.pyc,, -aiohttp/__pycache__/http.cpython-311.pyc,, -aiohttp/__pycache__/http_exceptions.cpython-311.pyc,, -aiohttp/__pycache__/http_parser.cpython-311.pyc,, -aiohttp/__pycache__/http_websocket.cpython-311.pyc,, -aiohttp/__pycache__/http_writer.cpython-311.pyc,, 
-aiohttp/__pycache__/locks.cpython-311.pyc,, -aiohttp/__pycache__/log.cpython-311.pyc,, -aiohttp/__pycache__/multipart.cpython-311.pyc,, -aiohttp/__pycache__/payload.cpython-311.pyc,, -aiohttp/__pycache__/payload_streamer.cpython-311.pyc,, -aiohttp/__pycache__/pytest_plugin.cpython-311.pyc,, -aiohttp/__pycache__/resolver.cpython-311.pyc,, -aiohttp/__pycache__/streams.cpython-311.pyc,, -aiohttp/__pycache__/tcp_helpers.cpython-311.pyc,, -aiohttp/__pycache__/test_utils.cpython-311.pyc,, -aiohttp/__pycache__/tracing.cpython-311.pyc,, -aiohttp/__pycache__/typedefs.cpython-311.pyc,, -aiohttp/__pycache__/web.cpython-311.pyc,, -aiohttp/__pycache__/web_app.cpython-311.pyc,, -aiohttp/__pycache__/web_exceptions.cpython-311.pyc,, -aiohttp/__pycache__/web_fileresponse.cpython-311.pyc,, -aiohttp/__pycache__/web_log.cpython-311.pyc,, -aiohttp/__pycache__/web_middlewares.cpython-311.pyc,, -aiohttp/__pycache__/web_protocol.cpython-311.pyc,, -aiohttp/__pycache__/web_request.cpython-311.pyc,, -aiohttp/__pycache__/web_response.cpython-311.pyc,, -aiohttp/__pycache__/web_routedef.cpython-311.pyc,, -aiohttp/__pycache__/web_runner.cpython-311.pyc,, -aiohttp/__pycache__/web_server.cpython-311.pyc,, -aiohttp/__pycache__/web_urldispatcher.cpython-311.pyc,, -aiohttp/__pycache__/web_ws.cpython-311.pyc,, -aiohttp/__pycache__/worker.cpython-311.pyc,, -aiohttp/_cparser.pxd,sha256=W6-cu0SyHhOEPeb475NvxagQ1Jz9pWqyZJvwEqTLNs0,4476 -aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70 -aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090 -aiohttp/_helpers.cp311-win_amd64.pyd,sha256=wNr6g-bLcKOZAozp0fIJMaWX4RqIVOL92x_ZASnW2lU,54272 -aiohttp/_helpers.pyi,sha256=2Hd5IC0Zf4YTEJ412suyyhsh1kVyVDv5g4stgyo2Ksc,208 -aiohttp/_helpers.pyx,sha256=tgl7fZh0QMT6cjf4jSJ8iaO6DdQD3GON2-SH4N5_ETg,1084 -aiohttp/_http_parser.cp311-win_amd64.pyd,sha256=uoQFgIoUgly8TEGWjiHa4L3_963MMEB8JN5F29tsEko,255488 -aiohttp/_http_parser.pyx,sha256=OcojgALQ4EHKQ4VnTud53Wd2t7cH1KCDfBESMqwdO7o,28894 -aiohttp/_http_writer.cp311-win_amd64.pyd,sha256=KfdngH6lqaRjy-wfTNW0NeDpJZ9-QyBt7BXmxxOTUvg,49152 -aiohttp/_http_writer.pyx,sha256=8CBLytO2rx1kdpWe9HYSznhLXdeZWyE-3xI7jaGasag,4738 -aiohttp/_websocket.cp311-win_amd64.pyd,sha256=yT7KsjodDRCIqjuMYnPKTuLflZPTwWfRPft5v8FVtg4,36352 -aiohttp/_websocket.pyx,sha256=o9J7yi9c2-jTBjE3dUkXxhDWKvRWJz5GZfyLsgJQa38,1617 -aiohttp/abc.py,sha256=wpbkcMsLWB_r_sD35PesqbWFY3tZOvilOjqSqHDUaMQ,5709 -aiohttp/base_protocol.py,sha256=u4ITEnXHJ88gNDngHxiU01ZPQhMy_m2eQTJx0cqwvXA,2831 -aiohttp/client.py,sha256=O5ef1NVAo0-Am0JrOPp6UIMkIK1EVJZ_FCb7qrd3DdE,48639 -aiohttp/client_exceptions.py,sha256=BiReSs5jdjdmhB99vYNBcypsuRfyQU2UQXG0cVRAD2A,9757 -aiohttp/client_proto.py,sha256=2_mvTWubu1aS9UU-JOhI_ABpSnVPIOxb-Dcz0-RbtHc,8915 -aiohttp/client_reqrep.py,sha256=TZ-lZ2QBFDJt2-fq_MPyZFEY-yu4awcz6SgZEiEBg_g,40960 -aiohttp/client_ws.py,sha256=mi8iVYQR25Hi20AQing6T1BZBcO24NsQIKubhsR8izM,11325 -aiohttp/compression_utils.py,sha256=OIQOhFq_YssPx-SNCn9UkQTqntHhZKZakhwUQfaJSyA,5172 -aiohttp/connector.py,sha256=6uVBggjuvD5y1i66674TRghZjXuMWBTGG8ao1CELvVc,54287 -aiohttp/cookiejar.py,sha256=vgjrRISdZ5jwGfJC6T9QLuW-f56KfpGrtnrcbsUHkU0,14434 -aiohttp/formdata.py,sha256=iySnD63XJwo4l1TT_KZiJGNzzmn0RtvvF2_FnJM32Uo,6278 -aiohttp/hdrs.py,sha256=_JN4MBE-UoBXGWGoSCKhIviTRc2IXS4fyk5nnuox0Ak,4721 -aiohttp/helpers.py,sha256=or4ZTfv64_hNgSygxXCWU5sbvsM1YvP6M4S4c-eCLeU,31254 -aiohttp/http.py,sha256=DGKcwDbgIMpasv7s2jeKCRuixyj7W-RIrihRFjj0xcY,1914 
-aiohttp/http_exceptions.py,sha256=GJYn38j4sI4KdUh993VnZlbgVHOUNI_Z9-ASDTjl5aU,2822 -aiohttp/http_parser.py,sha256=-_dJ9GGj37sMFnaNDH59p2mTku2rbVR_MLV9J4N5qhc,36509 -aiohttp/http_websocket.py,sha256=QXRR8XWtGwie7XuOw1eaUOQIUDvPrRq6s56lePn9xbM,27456 -aiohttp/http_writer.py,sha256=p8H39HhtilQEE90njvtJHc94Am95zjHNoS8T1JcNXJc,6131 -aiohttp/locks.py,sha256=vp1Z4zx0SvooSffw88dkZ-7qpk2CqRf5vWh2dpKagTA,1177 -aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333 -aiohttp/multipart.py,sha256=DF5PjzlZHrfHRlCp6o4nhd6el8Ff0xiBCvJXJm1o4Wo,33441 -aiohttp/payload.py,sha256=Ap0E4_p1d9E2UTTIYmBe5HXz05Tt7TN5nZ2zfYDtkr0,14005 -aiohttp/payload_streamer.py,sha256=rBb3jAFcwAK1QOgbhya2y4zGjhT11oQrepdcffA1_jM,2162 -aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8 -aiohttp/pytest_plugin.py,sha256=fJxoTu3NI1wDjFIV1FDmx0oGdRU7r6baovTwJneR6J4,11986 -aiohttp/resolver.py,sha256=k5cVNWiiCHqKDGko7UZNu2y-j6SrU7vQBx-omwHPhso,5230 -aiohttp/streams.py,sha256=Yy5Sj8ewDvbQTKe2BGnUGmCcj0bbl0EeKBT3uMK4K6M,21502 -aiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998 -aiohttp/test_utils.py,sha256=j2M78YyBz12p8tzcR-EiSZO_NlDK8vKkRzus4enyWYg,20860 -aiohttp/tracing.py,sha256=0EccU7PYykvNwd75SNFcuQ1I57RdSu2jDYIvNK2BW6c,15603 -aiohttp/typedefs.py,sha256=x7HBHDU2IlRZZb7ketOdc2Js0MLx53agxk0UnLxNFw4,1525 -aiohttp/web.py,sha256=8rghTkpERz14vRQA4oyXfGZLWMIBMrrHXxcvtZZ_fAU,19879 -aiohttp/web_app.py,sha256=0bajIxV0xb5AB9TChRsoVP8ytTLiY_TU7zrQ66GHNus,18907 -aiohttp/web_exceptions.py,sha256=itNRhCMDJFhnMWftr5SyTsoqh-i0n9rzTj0sjcAEUjo,10812 -aiohttp/web_fileresponse.py,sha256=dnldRvrC8tDrvTkHNru3Y9LJWKwEGclhlglp1OuNQ5c,11717 -aiohttp/web_log.py,sha256=w81HIudhfSxfodo2Fjkok7jWT56XXIrVMJN6ihYnLo0,8014 -aiohttp/web_middlewares.py,sha256=rYWtxDZ2AM3C2FvNuNyffpfmMfcHrxkSZMaTcsG1T_Q,4148 -aiohttp/web_protocol.py,sha256=nocQ-qrtl8XoZBtovtMOIg5Pk5wCHsH_sSy_Uy71VTI,23742 -aiohttp/web_request.py,sha256=C9uGSdocM-1w8UXtpN23fKiUPJ2jx_3W_3quhbVNYtE,29654 -aiohttp/web_response.py,sha256=1RWBY6v8qFHCcRyyJIImreSuDDFcFC-eBW1uDBn8Wko,28546 -aiohttp/web_routedef.py,sha256=7ZribqwusXb1s0T2vLj1roFne1fdz_ZsudBwRyiwQxM,6348 -aiohttp/web_runner.py,sha256=PUmmc3deHj1Iae2PYlPvSedD2dLTXgbumRjRl8QGLOc,12142 -aiohttp/web_server.py,sha256=kOlImrScEbvkGHG7i-N-7eqf55f2zC_J2BZcJanGGmU,2664 -aiohttp/web_urldispatcher.py,sha256=JqFCsCKBvnFXveUzzxUzyhV4LwbfysY8KAVGq5_82b0,41289 -aiohttp/web_ws.py,sha256=b7oHcrYINTwgQOcAnC7LgEG7R5J_7krpSQ-mZu0oo3U,19176 -aiohttp/worker.py,sha256=vDMxlk-Mo3rzN4yubw2-c8T6yg7PRY8Mv0NLuRm8lWw,8212 diff --git a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/WHEEL b/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/WHEEL deleted file mode 100644 index d60b004..0000000 --- a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.42.0) -Root-Is-Purelib: false -Tag: cp311-cp311-win_amd64 - diff --git a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/top_level.txt b/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/top_level.txt deleted file mode 100644 index ee4ba4f..0000000 --- a/.venv/Lib/site-packages/aiohttp-3.9.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -aiohttp diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash b/.venv/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash deleted file mode 100644 index 1dc9b9a..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_cparser.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -5baf9cbb44b21e13843de6f8ef936fc5a810d49cfda56ab2649bf012a4cb36cd \ No newline at end of 
file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash b/.venv/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash deleted file mode 100644 index ab9d476..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_find_header.pxd.hash +++ /dev/null @@ -1 +0,0 @@ -0455129b185e981b5b96ac738f31f7c74dc57f1696953cae0083b3f18679fe73 \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_helpers.pyi.hash b/.venv/Lib/site-packages/aiohttp/.hash/_helpers.pyi.hash deleted file mode 100644 index c304d37..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_helpers.pyi.hash +++ /dev/null @@ -1 +0,0 @@ -d87779202d197f8613109e35dacbb2ca1b21d64572543bf9838b2d832a362ac7 \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_helpers.pyx.hash b/.venv/Lib/site-packages/aiohttp/.hash/_helpers.pyx.hash deleted file mode 100644 index 8164dbb..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_helpers.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -b6097b7d987440c4fa7237f88d227c89a3ba0dd403dc638ddbe487e0de7f1138 \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash b/.venv/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash deleted file mode 100644 index 663750d..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_http_parser.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -39ca238002d0e041ca4385674ee779dd6776b7b707d4a0837c111232ac1d3bba \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash b/.venv/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash deleted file mode 100644 index 10d8347..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_http_writer.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -f0204bcad3b6af1d6476959ef47612ce784b5dd7995b213edf123b8da19ab1a8 \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/_websocket.pyx.hash b/.venv/Lib/site-packages/aiohttp/.hash/_websocket.pyx.hash deleted file mode 100644 index 511f26f..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/_websocket.pyx.hash +++ /dev/null @@ -1 +0,0 @@ -a3d27bca2f5cdbe8d3063137754917c610d62af456273e4665fc8bb202506b7f \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/.hash/hdrs.py.hash b/.venv/Lib/site-packages/aiohttp/.hash/hdrs.py.hash deleted file mode 100644 index b032e6e..0000000 --- a/.venv/Lib/site-packages/aiohttp/.hash/hdrs.py.hash +++ /dev/null @@ -1 +0,0 @@ -fc937830113e5280571961a84822a122f89345cd885d2e1fca4e679eea31d009 \ No newline at end of file diff --git a/.venv/Lib/site-packages/aiohttp/__init__.py b/.venv/Lib/site-packages/aiohttp/__init__.py deleted file mode 100644 index 12209de..0000000 --- a/.venv/Lib/site-packages/aiohttp/__init__.py +++ /dev/null @@ -1,240 +0,0 @@ -__version__ = "3.9.3" - -from typing import TYPE_CHECKING, Tuple - -from . 
import hdrs as hdrs -from .client import ( - BaseConnector as BaseConnector, - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - ClientResponseError as ClientResponseError, - ClientSession as ClientSession, - ClientSSLError as ClientSSLError, - ClientTimeout as ClientTimeout, - ClientWebSocketResponse as ClientWebSocketResponse, - ContentTypeError as ContentTypeError, - Fingerprint as Fingerprint, - InvalidURL as InvalidURL, - NamedPipeConnector as NamedPipeConnector, - RequestInfo as RequestInfo, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TCPConnector as TCPConnector, - TooManyRedirects as TooManyRedirects, - UnixConnector as UnixConnector, - WSServerHandshakeError as WSServerHandshakeError, - request as request, -) -from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar -from .formdata import FormData as FormData -from .helpers import BasicAuth, ChainMapProxy, ETag -from .http import ( - HttpVersion as HttpVersion, - HttpVersion10 as HttpVersion10, - HttpVersion11 as HttpVersion11, - WebSocketError as WebSocketError, - WSCloseCode as WSCloseCode, - WSMessage as WSMessage, - WSMsgType as WSMsgType, -) -from .multipart import ( - BadContentDispositionHeader as BadContentDispositionHeader, - BadContentDispositionParam as BadContentDispositionParam, - BodyPartReader as BodyPartReader, - MultipartReader as MultipartReader, - MultipartWriter as MultipartWriter, - content_disposition_filename as content_disposition_filename, - parse_content_disposition as parse_content_disposition, -) -from .payload import ( - PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, - AsyncIterablePayload as AsyncIterablePayload, - BufferedReaderPayload as BufferedReaderPayload, - BytesIOPayload as BytesIOPayload, - BytesPayload as BytesPayload, - IOBasePayload as IOBasePayload, - JsonPayload as JsonPayload, - Payload as Payload, - StringIOPayload as StringIOPayload, - StringPayload as StringPayload, - TextIOPayload as TextIOPayload, - get_payload as get_payload, - payload_type as payload_type, -) -from .payload_streamer import streamer as streamer -from .resolver import ( - AsyncResolver as AsyncResolver, - DefaultResolver as DefaultResolver, - ThreadedResolver as ThreadedResolver, -) -from .streams import ( - EMPTY_PAYLOAD as EMPTY_PAYLOAD, - DataQueue as DataQueue, - EofStream as EofStream, - FlowControlDataQueue as FlowControlDataQueue, - StreamReader as StreamReader, -) -from .tracing import ( - TraceConfig as TraceConfig, - TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, - TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, - TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, - TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, - TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, - TraceDnsCacheHitParams as TraceDnsCacheHitParams, - TraceDnsCacheMissParams as TraceDnsCacheMissParams, - 
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, - TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, - TraceRequestChunkSentParams as TraceRequestChunkSentParams, - TraceRequestEndParams as TraceRequestEndParams, - TraceRequestExceptionParams as TraceRequestExceptionParams, - TraceRequestRedirectParams as TraceRequestRedirectParams, - TraceRequestStartParams as TraceRequestStartParams, - TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, -) - -if TYPE_CHECKING: - # At runtime these are lazy-loaded at the bottom of the file. - from .worker import ( - GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, - GunicornWebWorker as GunicornWebWorker, - ) - -__all__: Tuple[str, ...] = ( - "hdrs", - # client - "BaseConnector", - "ClientConnectionError", - "ClientConnectorCertificateError", - "ClientConnectorError", - "ClientConnectorSSLError", - "ClientError", - "ClientHttpProxyError", - "ClientOSError", - "ClientPayloadError", - "ClientProxyConnectionError", - "ClientResponse", - "ClientRequest", - "ClientResponseError", - "ClientSSLError", - "ClientSession", - "ClientTimeout", - "ClientWebSocketResponse", - "ContentTypeError", - "Fingerprint", - "InvalidURL", - "RequestInfo", - "ServerConnectionError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ServerTimeoutError", - "TCPConnector", - "TooManyRedirects", - "UnixConnector", - "NamedPipeConnector", - "WSServerHandshakeError", - "request", - # cookiejar - "CookieJar", - "DummyCookieJar", - # formdata - "FormData", - # helpers - "BasicAuth", - "ChainMapProxy", - "ETag", - # http - "HttpVersion", - "HttpVersion10", - "HttpVersion11", - "WSMsgType", - "WSCloseCode", - "WSMessage", - "WebSocketError", - # multipart - "BadContentDispositionHeader", - "BadContentDispositionParam", - "BodyPartReader", - "MultipartReader", - "MultipartWriter", - "content_disposition_filename", - "parse_content_disposition", - # payload - "AsyncIterablePayload", - "BufferedReaderPayload", - "BytesIOPayload", - "BytesPayload", - "IOBasePayload", - "JsonPayload", - "PAYLOAD_REGISTRY", - "Payload", - "StringIOPayload", - "StringPayload", - "TextIOPayload", - "get_payload", - "payload_type", - # payload_streamer - "streamer", - # resolver - "AsyncResolver", - "DefaultResolver", - "ThreadedResolver", - # streams - "DataQueue", - "EMPTY_PAYLOAD", - "EofStream", - "FlowControlDataQueue", - "StreamReader", - # tracing - "TraceConfig", - "TraceConnectionCreateEndParams", - "TraceConnectionCreateStartParams", - "TraceConnectionQueuedEndParams", - "TraceConnectionQueuedStartParams", - "TraceConnectionReuseconnParams", - "TraceDnsCacheHitParams", - "TraceDnsCacheMissParams", - "TraceDnsResolveHostEndParams", - "TraceDnsResolveHostStartParams", - "TraceRequestChunkSentParams", - "TraceRequestEndParams", - "TraceRequestExceptionParams", - "TraceRequestRedirectParams", - "TraceRequestStartParams", - "TraceResponseChunkReceivedParams", - # workers (imported lazily with __getattr__) - "GunicornUVLoopWebWorker", - "GunicornWebWorker", -) - - -def __dir__() -> Tuple[str, ...]: - return __all__ + ("__author__", "__doc__") - - -def __getattr__(name: str) -> object: - global GunicornUVLoopWebWorker, GunicornWebWorker - - # Importing gunicorn takes a long time (>100ms), so only import if actually needed. 
- if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"): - try: - from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw - except ImportError: - return None - - GunicornUVLoopWebWorker = guv # type: ignore[misc] - GunicornWebWorker = gw # type: ignore[misc] - return guv if name == "GunicornUVLoopWebWorker" else gw - - raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 5b010e2..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc deleted file mode 100644 index 026c9b2..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/abc.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc deleted file mode 100644 index 4df0407..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/base_protocol.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc deleted file mode 100644 index b20ca9c..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc deleted file mode 100644 index 169899c..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc deleted file mode 100644 index 4ad3ce1..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc deleted file mode 100644 index eea6849..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc deleted file mode 100644 index 5582d8b..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc deleted file mode 100644 index 57c8632..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/compression_utils.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc deleted file mode 100644 index 501f59d..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc and /dev/null differ 
diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc deleted file mode 100644 index 624dd00..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/cookiejar.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc deleted file mode 100644 index 5d21633..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/formdata.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc deleted file mode 100644 index 21e9179..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/hdrs.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc deleted file mode 100644 index d4e3073..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/helpers.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc deleted file mode 100644 index 9c8b99a..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc deleted file mode 100644 index 4c61252..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_exceptions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc deleted file mode 100644 index b48427e..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_parser.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc deleted file mode 100644 index d7f6f64..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc deleted file mode 100644 index 1a75e83..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/http_writer.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc deleted file mode 100644 index 7dd2525..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/locks.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc deleted file mode 100644 index 24859d5..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/log.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc deleted file mode 100644 
index d0e2e79..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc deleted file mode 100644 index 3bf8287..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/payload.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc deleted file mode 100644 index c7e528f..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-311.pyc deleted file mode 100644 index 8d0c187..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc deleted file mode 100644 index 53fc990..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/resolver.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc deleted file mode 100644 index 72181c4..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/streams.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc deleted file mode 100644 index 5d8d45b..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/test_utils.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/test_utils.cpython-311.pyc deleted file mode 100644 index 5aeaa6f..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/test_utils.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc deleted file mode 100644 index a708b55..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/tracing.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc deleted file mode 100644 index c068447..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/typedefs.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web.cpython-311.pyc deleted file mode 100644 index 73b7cc4..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_app.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_app.cpython-311.pyc deleted file mode 100644 index 98a74dc..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_app.cpython-311.pyc and /dev/null differ diff --git 
a/.venv/Lib/site-packages/aiohttp/__pycache__/web_exceptions.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_exceptions.cpython-311.pyc deleted file mode 100644 index b40f6d9..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_exceptions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-311.pyc deleted file mode 100644 index 8aab572..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_log.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_log.cpython-311.pyc deleted file mode 100644 index 5896ce3..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_log.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_middlewares.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_middlewares.cpython-311.pyc deleted file mode 100644 index 3956e3b..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_middlewares.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_protocol.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_protocol.cpython-311.pyc deleted file mode 100644 index 1733c92..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_protocol.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_request.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_request.cpython-311.pyc deleted file mode 100644 index 249b3bd..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_request.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_response.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_response.cpython-311.pyc deleted file mode 100644 index b80db83..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_response.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_routedef.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_routedef.cpython-311.pyc deleted file mode 100644 index 8a15061..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_routedef.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_runner.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_runner.cpython-311.pyc deleted file mode 100644 index 3fd0f21..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_runner.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_server.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_server.cpython-311.pyc deleted file mode 100644 index 28b8ebf..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_server.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-311.pyc deleted file mode 100644 index 21e2204..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-311.pyc and /dev/null differ diff --git 
a/.venv/Lib/site-packages/aiohttp/__pycache__/web_ws.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/web_ws.cpython-311.pyc deleted file mode 100644 index dc26515..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/web_ws.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/__pycache__/worker.cpython-311.pyc b/.venv/Lib/site-packages/aiohttp/__pycache__/worker.cpython-311.pyc deleted file mode 100644 index eadbc61..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/__pycache__/worker.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/_cparser.pxd b/.venv/Lib/site-packages/aiohttp/_cparser.pxd deleted file mode 100644 index c2cd5a9..0000000 --- a/.venv/Lib/site-packages/aiohttp/_cparser.pxd +++ /dev/null @@ -1,158 +0,0 @@ -from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t - - -cdef extern from "../vendor/llhttp/build/llhttp.h": - - struct llhttp__internal_s: - int32_t _index - void* _span_pos0 - void* _span_cb0 - int32_t error - const char* reason - const char* error_pos - void* data - void* _current - uint64_t content_length - uint8_t type - uint8_t method - uint8_t http_major - uint8_t http_minor - uint8_t header_state - uint8_t lenient_flags - uint8_t upgrade - uint8_t finish - uint16_t flags - uint16_t status_code - void* settings - - ctypedef llhttp__internal_s llhttp__internal_t - ctypedef llhttp__internal_t llhttp_t - - ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1 - ctypedef int (*llhttp_cb)(llhttp_t*) except -1 - - struct llhttp_settings_s: - llhttp_cb on_message_begin - llhttp_data_cb on_url - llhttp_data_cb on_status - llhttp_data_cb on_header_field - llhttp_data_cb on_header_value - llhttp_cb on_headers_complete - llhttp_data_cb on_body - llhttp_cb on_message_complete - llhttp_cb on_chunk_header - llhttp_cb on_chunk_complete - - llhttp_cb on_url_complete - llhttp_cb on_status_complete - llhttp_cb on_header_field_complete - llhttp_cb on_header_value_complete - - ctypedef llhttp_settings_s llhttp_settings_t - - enum llhttp_errno: - HPE_OK, - HPE_INTERNAL, - HPE_STRICT, - HPE_LF_EXPECTED, - HPE_UNEXPECTED_CONTENT_LENGTH, - HPE_CLOSED_CONNECTION, - HPE_INVALID_METHOD, - HPE_INVALID_URL, - HPE_INVALID_CONSTANT, - HPE_INVALID_VERSION, - HPE_INVALID_HEADER_TOKEN, - HPE_INVALID_CONTENT_LENGTH, - HPE_INVALID_CHUNK_SIZE, - HPE_INVALID_STATUS, - HPE_INVALID_EOF_STATE, - HPE_INVALID_TRANSFER_ENCODING, - HPE_CB_MESSAGE_BEGIN, - HPE_CB_HEADERS_COMPLETE, - HPE_CB_MESSAGE_COMPLETE, - HPE_CB_CHUNK_HEADER, - HPE_CB_CHUNK_COMPLETE, - HPE_PAUSED, - HPE_PAUSED_UPGRADE, - HPE_USER - - ctypedef llhttp_errno llhttp_errno_t - - enum llhttp_flags: - F_CHUNKED, - F_CONTENT_LENGTH - - enum llhttp_type: - HTTP_REQUEST, - HTTP_RESPONSE, - HTTP_BOTH - - enum llhttp_method: - HTTP_DELETE, - HTTP_GET, - HTTP_HEAD, - HTTP_POST, - HTTP_PUT, - HTTP_CONNECT, - HTTP_OPTIONS, - HTTP_TRACE, - HTTP_COPY, - HTTP_LOCK, - HTTP_MKCOL, - HTTP_MOVE, - HTTP_PROPFIND, - HTTP_PROPPATCH, - HTTP_SEARCH, - HTTP_UNLOCK, - HTTP_BIND, - HTTP_REBIND, - HTTP_UNBIND, - HTTP_ACL, - HTTP_REPORT, - HTTP_MKACTIVITY, - HTTP_CHECKOUT, - HTTP_MERGE, - HTTP_MSEARCH, - HTTP_NOTIFY, - HTTP_SUBSCRIBE, - HTTP_UNSUBSCRIBE, - HTTP_PATCH, - HTTP_PURGE, - HTTP_MKCALENDAR, - HTTP_LINK, - HTTP_UNLINK, - HTTP_SOURCE, - HTTP_PRI, - HTTP_DESCRIBE, - HTTP_ANNOUNCE, - HTTP_SETUP, - HTTP_PLAY, - HTTP_PAUSE, - HTTP_TEARDOWN, - HTTP_GET_PARAMETER, - HTTP_SET_PARAMETER, - HTTP_REDIRECT, - HTTP_RECORD, - HTTP_FLUSH - - 
ctypedef llhttp_method llhttp_method_t; - - void llhttp_settings_init(llhttp_settings_t* settings) - void llhttp_init(llhttp_t* parser, llhttp_type type, - const llhttp_settings_t* settings) - - llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) - - int llhttp_should_keep_alive(const llhttp_t* parser) - - void llhttp_resume_after_upgrade(llhttp_t* parser) - - llhttp_errno_t llhttp_get_errno(const llhttp_t* parser) - const char* llhttp_get_error_reason(const llhttp_t* parser) - const char* llhttp_get_error_pos(const llhttp_t* parser) - - const char* llhttp_method_name(llhttp_method_t method) - - void llhttp_set_lenient_headers(llhttp_t* parser, int enabled) - void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled) - void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled) diff --git a/.venv/Lib/site-packages/aiohttp/_find_header.pxd b/.venv/Lib/site-packages/aiohttp/_find_header.pxd deleted file mode 100644 index 37a6c37..0000000 --- a/.venv/Lib/site-packages/aiohttp/_find_header.pxd +++ /dev/null @@ -1,2 +0,0 @@ -cdef extern from "_find_header.h": - int find_header(char *, int) diff --git a/.venv/Lib/site-packages/aiohttp/_headers.pxi b/.venv/Lib/site-packages/aiohttp/_headers.pxi deleted file mode 100644 index 3744721..0000000 --- a/.venv/Lib/site-packages/aiohttp/_headers.pxi +++ /dev/null @@ -1,83 +0,0 @@ -# The file is autogenerated from aiohttp/hdrs.py -# Run ./tools/gen.py to update it after the origin changing. - -from . import hdrs -cdef tuple headers = ( - hdrs.ACCEPT, - hdrs.ACCEPT_CHARSET, - hdrs.ACCEPT_ENCODING, - hdrs.ACCEPT_LANGUAGE, - hdrs.ACCEPT_RANGES, - hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, - hdrs.ACCESS_CONTROL_ALLOW_HEADERS, - hdrs.ACCESS_CONTROL_ALLOW_METHODS, - hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, - hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, - hdrs.ACCESS_CONTROL_MAX_AGE, - hdrs.ACCESS_CONTROL_REQUEST_HEADERS, - hdrs.ACCESS_CONTROL_REQUEST_METHOD, - hdrs.AGE, - hdrs.ALLOW, - hdrs.AUTHORIZATION, - hdrs.CACHE_CONTROL, - hdrs.CONNECTION, - hdrs.CONTENT_DISPOSITION, - hdrs.CONTENT_ENCODING, - hdrs.CONTENT_LANGUAGE, - hdrs.CONTENT_LENGTH, - hdrs.CONTENT_LOCATION, - hdrs.CONTENT_MD5, - hdrs.CONTENT_RANGE, - hdrs.CONTENT_TRANSFER_ENCODING, - hdrs.CONTENT_TYPE, - hdrs.COOKIE, - hdrs.DATE, - hdrs.DESTINATION, - hdrs.DIGEST, - hdrs.ETAG, - hdrs.EXPECT, - hdrs.EXPIRES, - hdrs.FORWARDED, - hdrs.FROM, - hdrs.HOST, - hdrs.IF_MATCH, - hdrs.IF_MODIFIED_SINCE, - hdrs.IF_NONE_MATCH, - hdrs.IF_RANGE, - hdrs.IF_UNMODIFIED_SINCE, - hdrs.KEEP_ALIVE, - hdrs.LAST_EVENT_ID, - hdrs.LAST_MODIFIED, - hdrs.LINK, - hdrs.LOCATION, - hdrs.MAX_FORWARDS, - hdrs.ORIGIN, - hdrs.PRAGMA, - hdrs.PROXY_AUTHENTICATE, - hdrs.PROXY_AUTHORIZATION, - hdrs.RANGE, - hdrs.REFERER, - hdrs.RETRY_AFTER, - hdrs.SEC_WEBSOCKET_ACCEPT, - hdrs.SEC_WEBSOCKET_EXTENSIONS, - hdrs.SEC_WEBSOCKET_KEY, - hdrs.SEC_WEBSOCKET_KEY1, - hdrs.SEC_WEBSOCKET_PROTOCOL, - hdrs.SEC_WEBSOCKET_VERSION, - hdrs.SERVER, - hdrs.SET_COOKIE, - hdrs.TE, - hdrs.TRAILER, - hdrs.TRANSFER_ENCODING, - hdrs.URI, - hdrs.UPGRADE, - hdrs.USER_AGENT, - hdrs.VARY, - hdrs.VIA, - hdrs.WWW_AUTHENTICATE, - hdrs.WANT_DIGEST, - hdrs.WARNING, - hdrs.X_FORWARDED_FOR, - hdrs.X_FORWARDED_HOST, - hdrs.X_FORWARDED_PROTO, -) diff --git a/.venv/Lib/site-packages/aiohttp/_helpers.cp311-win_amd64.pyd b/.venv/Lib/site-packages/aiohttp/_helpers.cp311-win_amd64.pyd deleted file mode 100644 index 5ce4f7c..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/_helpers.cp311-win_amd64.pyd and /dev/null differ diff --git 
a/.venv/Lib/site-packages/aiohttp/_helpers.pyi b/.venv/Lib/site-packages/aiohttp/_helpers.pyi deleted file mode 100644 index 1e35893..0000000 --- a/.venv/Lib/site-packages/aiohttp/_helpers.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Any - -class reify: - def __init__(self, wrapped: Any) -> None: ... - def __get__(self, inst: Any, owner: Any) -> Any: ... - def __set__(self, inst: Any, value: Any) -> None: ... diff --git a/.venv/Lib/site-packages/aiohttp/_helpers.pyx b/.venv/Lib/site-packages/aiohttp/_helpers.pyx deleted file mode 100644 index 665f367..0000000 --- a/.venv/Lib/site-packages/aiohttp/_helpers.pyx +++ /dev/null @@ -1,35 +0,0 @@ -cdef class reify: - """Use as a class method decorator. It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. - - """ - - cdef object wrapped - cdef object name - - def __init__(self, wrapped): - self.wrapped = wrapped - self.name = wrapped.__name__ - - @property - def __doc__(self): - return self.wrapped.__doc__ - - def __get__(self, inst, owner): - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise - - def __set__(self, inst, value): - raise AttributeError("reified property is read-only") diff --git a/.venv/Lib/site-packages/aiohttp/_http_parser.cp311-win_amd64.pyd b/.venv/Lib/site-packages/aiohttp/_http_parser.cp311-win_amd64.pyd deleted file mode 100644 index 086a008..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/_http_parser.cp311-win_amd64.pyd and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/_http_parser.pyx b/.venv/Lib/site-packages/aiohttp/_http_parser.pyx deleted file mode 100644 index 3f28fbd..0000000 --- a/.venv/Lib/site-packages/aiohttp/_http_parser.pyx +++ /dev/null @@ -1,836 +0,0 @@ -#cython: language_level=3 -# -# Based on https://github.com/MagicStack/httptools -# - -from cpython cimport ( - Py_buffer, - PyBUF_SIMPLE, - PyBuffer_Release, - PyBytes_AsString, - PyBytes_AsStringAndSize, - PyObject_GetBuffer, -) -from cpython.mem cimport PyMem_Free, PyMem_Malloc -from libc.limits cimport ULLONG_MAX -from libc.string cimport memcpy - -from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy -from yarl import URL as _URL - -from aiohttp import hdrs -from aiohttp.helpers import DEBUG - -from .http_exceptions import ( - BadHttpMessage, - BadStatusLine, - ContentLengthError, - InvalidHeader, - InvalidURLError, - LineTooLong, - PayloadEncodingError, - TransferEncodingError, -) -from .http_parser import DeflateBuffer as _DeflateBuffer -from .http_writer import ( - HttpVersion as _HttpVersion, - HttpVersion10 as _HttpVersion10, - HttpVersion11 as _HttpVersion11, -) -from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader - -cimport cython - -from aiohttp cimport _cparser as cparser - -include "_headers.pxi" - -from aiohttp cimport _find_header - -DEF DEFAULT_FREELIST_SIZE = 250 - -cdef extern from "Python.h": - int PyByteArray_Resize(object, Py_ssize_t) except -1 - Py_ssize_t PyByteArray_Size(object) except -1 - char* PyByteArray_AsString(object) - -__all__ = ('HttpRequestParser', 'HttpResponseParser', - 'RawRequestMessage', 'RawResponseMessage') - -cdef object URL = _URL -cdef 
object URL_build = URL.build -cdef object CIMultiDict = _CIMultiDict -cdef object CIMultiDictProxy = _CIMultiDictProxy -cdef object HttpVersion = _HttpVersion -cdef object HttpVersion10 = _HttpVersion10 -cdef object HttpVersion11 = _HttpVersion11 -cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1 -cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING -cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD -cdef object StreamReader = _StreamReader -cdef object DeflateBuffer = _DeflateBuffer - - -cdef inline object extend(object buf, const char* at, size_t length): - cdef Py_ssize_t s - cdef char* ptr - s = PyByteArray_Size(buf) - PyByteArray_Resize(buf, s + length) - ptr = PyByteArray_AsString(buf) - memcpy(ptr + s, at, length) - - -DEF METHODS_COUNT = 46; - -cdef list _http_method = [] - -for i in range(METHODS_COUNT): - _http_method.append( - cparser.llhttp_method_name( i).decode('ascii')) - - -cdef inline str http_method_str(int i): - if i < METHODS_COUNT: - return _http_method[i] - else: - return "" - -cdef inline object find_header(bytes raw_header): - cdef Py_ssize_t size - cdef char *buf - cdef int idx - PyBytes_AsStringAndSize(raw_header, &buf, &size) - idx = _find_header.find_header(buf, size) - if idx == -1: - return raw_header.decode('utf-8', 'surrogateescape') - return headers[idx] - - -@cython.freelist(DEFAULT_FREELIST_SIZE) -cdef class RawRequestMessage: - cdef readonly str method - cdef readonly str path - cdef readonly object version # HttpVersion - cdef readonly object headers # CIMultiDict - cdef readonly object raw_headers # tuple - cdef readonly object should_close - cdef readonly object compression - cdef readonly object upgrade - cdef readonly object chunked - cdef readonly object url # yarl.URL - - def __init__(self, method, path, version, headers, raw_headers, - should_close, compression, upgrade, chunked, url): - self.method = method - self.path = path - self.version = version - self.headers = headers - self.raw_headers = raw_headers - self.should_close = should_close - self.compression = compression - self.upgrade = upgrade - self.chunked = chunked - self.url = url - - def __repr__(self): - info = [] - info.append(("method", self.method)) - info.append(("path", self.path)) - info.append(("version", self.version)) - info.append(("headers", self.headers)) - info.append(("raw_headers", self.raw_headers)) - info.append(("should_close", self.should_close)) - info.append(("compression", self.compression)) - info.append(("upgrade", self.upgrade)) - info.append(("chunked", self.chunked)) - info.append(("url", self.url)) - sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - return '' - - def _replace(self, **dct): - cdef RawRequestMessage ret - ret = _new_request_message(self.method, - self.path, - self.version, - self.headers, - self.raw_headers, - self.should_close, - self.compression, - self.upgrade, - self.chunked, - self.url) - if "method" in dct: - ret.method = dct["method"] - if "path" in dct: - ret.path = dct["path"] - if "version" in dct: - ret.version = dct["version"] - if "headers" in dct: - ret.headers = dct["headers"] - if "raw_headers" in dct: - ret.raw_headers = dct["raw_headers"] - if "should_close" in dct: - ret.should_close = dct["should_close"] - if "compression" in dct: - ret.compression = dct["compression"] - if "upgrade" in dct: - ret.upgrade = dct["upgrade"] - if "chunked" in dct: - ret.chunked = dct["chunked"] - if "url" in dct: - ret.url = dct["url"] - return ret - -cdef _new_request_message(str method, - str path, - object version, - object 
headers, - object raw_headers, - bint should_close, - object compression, - bint upgrade, - bint chunked, - object url): - cdef RawRequestMessage ret - ret = RawRequestMessage.__new__(RawRequestMessage) - ret.method = method - ret.path = path - ret.version = version - ret.headers = headers - ret.raw_headers = raw_headers - ret.should_close = should_close - ret.compression = compression - ret.upgrade = upgrade - ret.chunked = chunked - ret.url = url - return ret - - -@cython.freelist(DEFAULT_FREELIST_SIZE) -cdef class RawResponseMessage: - cdef readonly object version # HttpVersion - cdef readonly int code - cdef readonly str reason - cdef readonly object headers # CIMultiDict - cdef readonly object raw_headers # tuple - cdef readonly object should_close - cdef readonly object compression - cdef readonly object upgrade - cdef readonly object chunked - - def __init__(self, version, code, reason, headers, raw_headers, - should_close, compression, upgrade, chunked): - self.version = version - self.code = code - self.reason = reason - self.headers = headers - self.raw_headers = raw_headers - self.should_close = should_close - self.compression = compression - self.upgrade = upgrade - self.chunked = chunked - - def __repr__(self): - info = [] - info.append(("version", self.version)) - info.append(("code", self.code)) - info.append(("reason", self.reason)) - info.append(("headers", self.headers)) - info.append(("raw_headers", self.raw_headers)) - info.append(("should_close", self.should_close)) - info.append(("compression", self.compression)) - info.append(("upgrade", self.upgrade)) - info.append(("chunked", self.chunked)) - sinfo = ', '.join(name + '=' + repr(val) for name, val in info) - return '' - - -cdef _new_response_message(object version, - int code, - str reason, - object headers, - object raw_headers, - bint should_close, - object compression, - bint upgrade, - bint chunked): - cdef RawResponseMessage ret - ret = RawResponseMessage.__new__(RawResponseMessage) - ret.version = version - ret.code = code - ret.reason = reason - ret.headers = headers - ret.raw_headers = raw_headers - ret.should_close = should_close - ret.compression = compression - ret.upgrade = upgrade - ret.chunked = chunked - return ret - - -@cython.internal -cdef class HttpParser: - - cdef: - cparser.llhttp_t* _cparser - cparser.llhttp_settings_t* _csettings - - bytearray _raw_name - bytearray _raw_value - bint _has_value - - object _protocol - object _loop - object _timer - - size_t _max_line_size - size_t _max_field_size - size_t _max_headers - bint _response_with_body - bint _read_until_eof - - bint _started - object _url - bytearray _buf - str _path - str _reason - object _headers - list _raw_headers - bint _upgraded - list _messages - object _payload - bint _payload_error - object _payload_exception - object _last_error - bint _auto_decompress - int _limit - - str _content_encoding - - Py_buffer py_buf - - def __cinit__(self): - self._cparser = \ - PyMem_Malloc(sizeof(cparser.llhttp_t)) - if self._cparser is NULL: - raise MemoryError() - - self._csettings = \ - PyMem_Malloc(sizeof(cparser.llhttp_settings_t)) - if self._csettings is NULL: - raise MemoryError() - - def __dealloc__(self): - PyMem_Free(self._cparser) - PyMem_Free(self._csettings) - - cdef _init( - self, cparser.llhttp_type mode, - object protocol, object loop, int limit, - object timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint 
read_until_eof=False, - bint auto_decompress=True, - ): - cparser.llhttp_settings_init(self._csettings) - cparser.llhttp_init(self._cparser, mode, self._csettings) - self._cparser.data = self - self._cparser.content_length = 0 - - self._protocol = protocol - self._loop = loop - self._timer = timer - - self._buf = bytearray() - self._payload = None - self._payload_error = 0 - self._payload_exception = payload_exception - self._messages = [] - - self._raw_name = bytearray() - self._raw_value = bytearray() - self._has_value = False - - self._max_line_size = max_line_size - self._max_headers = max_headers - self._max_field_size = max_field_size - self._response_with_body = response_with_body - self._read_until_eof = read_until_eof - self._upgraded = False - self._auto_decompress = auto_decompress - self._content_encoding = None - - self._csettings.on_url = cb_on_url - self._csettings.on_status = cb_on_status - self._csettings.on_header_field = cb_on_header_field - self._csettings.on_header_value = cb_on_header_value - self._csettings.on_headers_complete = cb_on_headers_complete - self._csettings.on_body = cb_on_body - self._csettings.on_message_begin = cb_on_message_begin - self._csettings.on_message_complete = cb_on_message_complete - self._csettings.on_chunk_header = cb_on_chunk_header - self._csettings.on_chunk_complete = cb_on_chunk_complete - - self._last_error = None - self._limit = limit - - cdef _process_header(self): - if self._raw_name: - raw_name = bytes(self._raw_name) - raw_value = bytes(self._raw_value) - - name = find_header(raw_name) - value = raw_value.decode('utf-8', 'surrogateescape') - - self._headers.add(name, value) - - if name is CONTENT_ENCODING: - self._content_encoding = value - - PyByteArray_Resize(self._raw_name, 0) - PyByteArray_Resize(self._raw_value, 0) - self._has_value = False - self._raw_headers.append((raw_name, raw_value)) - - cdef _on_header_field(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf - if self._has_value: - self._process_header() - - size = PyByteArray_Size(self._raw_name) - PyByteArray_Resize(self._raw_name, size + length) - buf = PyByteArray_AsString(self._raw_name) - memcpy(buf + size, at, length) - - cdef _on_header_value(self, char* at, size_t length): - cdef Py_ssize_t size - cdef char *buf - - size = PyByteArray_Size(self._raw_value) - PyByteArray_Resize(self._raw_value, size + length) - buf = PyByteArray_AsString(self._raw_value) - memcpy(buf + size, at, length) - self._has_value = True - - cdef _on_headers_complete(self): - self._process_header() - - method = http_method_str(self._cparser.method) - should_close = not cparser.llhttp_should_keep_alive(self._cparser) - upgrade = self._cparser.upgrade - chunked = self._cparser.flags & cparser.F_CHUNKED - - raw_headers = tuple(self._raw_headers) - headers = CIMultiDictProxy(self._headers) - - if upgrade or self._cparser.method == cparser.HTTP_CONNECT: - self._upgraded = True - - # do not support old websocket spec - if SEC_WEBSOCKET_KEY1 in headers: - raise InvalidHeader(SEC_WEBSOCKET_KEY1) - - encoding = None - enc = self._content_encoding - if enc is not None: - self._content_encoding = None - enc = enc.lower() - if enc in ('gzip', 'deflate', 'br'): - encoding = enc - - if self._cparser.type == cparser.HTTP_REQUEST: - msg = _new_request_message( - method, self._path, - self.http_version(), headers, raw_headers, - should_close, encoding, upgrade, chunked, self._url) - else: - msg = _new_response_message( - self.http_version(), self._cparser.status_code, self._reason, 
- headers, raw_headers, should_close, encoding, - upgrade, chunked) - - if ( - ULLONG_MAX > self._cparser.content_length > 0 or chunked or - self._cparser.method == cparser.HTTP_CONNECT or - (self._cparser.status_code >= 199 and - self._cparser.content_length == 0 and - self._read_until_eof) - ): - payload = StreamReader( - self._protocol, timer=self._timer, loop=self._loop, - limit=self._limit) - else: - payload = EMPTY_PAYLOAD - - self._payload = payload - if encoding is not None and self._auto_decompress: - self._payload = DeflateBuffer(payload, encoding) - - if not self._response_with_body: - payload = EMPTY_PAYLOAD - - self._messages.append((msg, payload)) - - cdef _on_message_complete(self): - self._payload.feed_eof() - self._payload = None - - cdef _on_chunk_header(self): - self._payload.begin_http_chunk_receiving() - - cdef _on_chunk_complete(self): - self._payload.end_http_chunk_receiving() - - cdef object _on_status_complete(self): - pass - - cdef inline http_version(self): - cdef cparser.llhttp_t* parser = self._cparser - - if parser.http_major == 1: - if parser.http_minor == 0: - return HttpVersion10 - elif parser.http_minor == 1: - return HttpVersion11 - - return HttpVersion(parser.http_major, parser.http_minor) - - ### Public API ### - - def feed_eof(self): - cdef bytes desc - - if self._payload is not None: - if self._cparser.flags & cparser.F_CHUNKED: - raise TransferEncodingError( - "Not enough data for satisfy transfer length header.") - elif self._cparser.flags & cparser.F_CONTENT_LENGTH: - raise ContentLengthError( - "Not enough data for satisfy content length header.") - elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK: - desc = cparser.llhttp_get_error_reason(self._cparser) - raise PayloadEncodingError(desc.decode('latin-1')) - else: - self._payload.feed_eof() - elif self._started: - self._on_headers_complete() - if self._messages: - return self._messages[-1][0] - - def feed_data(self, data): - cdef: - size_t data_len - size_t nb - cdef cparser.llhttp_errno_t errno - - PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) - data_len = self.py_buf.len - - errno = cparser.llhttp_execute( - self._cparser, - self.py_buf.buf, - data_len) - - if errno is cparser.HPE_PAUSED_UPGRADE: - cparser.llhttp_resume_after_upgrade(self._cparser) - - nb = cparser.llhttp_get_error_pos(self._cparser) - self.py_buf.buf - - PyBuffer_Release(&self.py_buf) - - if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE): - if self._payload_error == 0: - if self._last_error is not None: - ex = self._last_error - self._last_error = None - else: - after = cparser.llhttp_get_error_pos(self._cparser) - before = data[:after - self.py_buf.buf] - after_b = after.split(b"\r\n", 1)[0] - before = before.rsplit(b"\r\n", 1)[-1] - data = before + after_b - pointer = " " * (len(repr(before))-1) + "^" - ex = parser_error_from_errno(self._cparser, data, pointer) - self._payload = None - raise ex - - if self._messages: - messages = self._messages - self._messages = [] - else: - messages = () - - if self._upgraded: - return messages, True, data[nb:] - else: - return messages, False, b'' - - def set_upgraded(self, val): - self._upgraded = val - - -cdef class HttpRequestParser(HttpParser): - - def __init__( - self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - bint auto_decompress=True, - ): - self._init(cparser.HTTP_REQUEST, protocol, 
loop, limit, timer, - max_line_size, max_headers, max_field_size, - payload_exception, response_with_body, read_until_eof, - auto_decompress) - - cdef object _on_status_complete(self): - cdef int idx1, idx2 - if not self._buf: - return - self._path = self._buf.decode('utf-8', 'surrogateescape') - try: - idx3 = len(self._path) - if self._cparser.method == cparser.HTTP_CONNECT: - # authority-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 - self._url = URL.build(authority=self._path, encoded=True) - elif idx3 > 1 and self._path[0] == '/': - # origin-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 - idx1 = self._path.find("?") - if idx1 == -1: - query = "" - idx2 = self._path.find("#") - if idx2 == -1: - path = self._path - fragment = "" - else: - path = self._path[0: idx2] - fragment = self._path[idx2+1:] - - else: - path = self._path[0:idx1] - idx1 += 1 - idx2 = self._path.find("#", idx1+1) - if idx2 == -1: - query = self._path[idx1:] - fragment = "" - else: - query = self._path[idx1: idx2] - fragment = self._path[idx2+1:] - - self._url = URL.build( - path=path, - query_string=query, - fragment=fragment, - encoded=True, - ) - else: - # absolute-form for proxy maybe, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 - self._url = URL(self._path, encoded=True) - finally: - PyByteArray_Resize(self._buf, 0) - - -cdef class HttpResponseParser(HttpParser): - - def __init__( - self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, - size_t max_field_size=8190, payload_exception=None, - bint response_with_body=True, bint read_until_eof=False, - bint auto_decompress=True - ): - self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer, - max_line_size, max_headers, max_field_size, - payload_exception, response_with_body, read_until_eof, - auto_decompress) - # Use strict parsing on dev mode, so users are warned about broken servers. 
- if not DEBUG: - cparser.llhttp_set_lenient_headers(self._cparser, 1) - cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1) - cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1) - - cdef object _on_status_complete(self): - if self._buf: - self._reason = self._buf.decode('utf-8', 'surrogateescape') - PyByteArray_Resize(self._buf, 0) - else: - self._reason = self._reason or '' - -cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - - pyparser._started = True - pyparser._headers = CIMultiDict() - pyparser._raw_headers = [] - PyByteArray_Resize(pyparser._buf, 0) - pyparser._path = None - pyparser._reason = None - return 0 - - -cdef int cb_on_url(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - try: - if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) - extend(pyparser._buf, at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_status(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef str reason - try: - if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) - extend(pyparser._buf, at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_header_field(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef Py_ssize_t size - try: - pyparser._on_status_complete() - size = len(pyparser._raw_name) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header name is too long', pyparser._max_field_size, size) - pyparser._on_header_field(at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_header_value(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef Py_ssize_t size - try: - size = len(pyparser._raw_value) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header value is too long', pyparser._max_field_size, size) - pyparser._on_header_value(at, length) - except BaseException as ex: - pyparser._last_error = ex - return -1 - else: - return 0 - - -cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_status_complete() - pyparser._on_headers_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - if ( - pyparser._cparser.upgrade or - pyparser._cparser.method == cparser.HTTP_CONNECT - ): - return 2 - else: - return 0 - - -cdef int cb_on_body(cparser.llhttp_t* parser, - const char *at, size_t length) except -1: - cdef HttpParser pyparser = parser.data - cdef bytes body = at[:length] - try: - pyparser._payload.feed_data(body, length) - except BaseException as exc: - if pyparser._payload_exception is not None: - pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - else: - pyparser._payload.set_exception(exc) - pyparser._payload_error = 1 - return -1 - else: - return 0 - - -cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._started = False - pyparser._on_message_complete() - except 
BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_chunk_header() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1: - cdef HttpParser pyparser = parser.data - try: - pyparser._on_chunk_complete() - except BaseException as exc: - pyparser._last_error = exc - return -1 - else: - return 0 - - -cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer): - cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser) - cdef bytes desc = cparser.llhttp_get_error_reason(parser) - - err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer) - - if errno in {cparser.HPE_CB_MESSAGE_BEGIN, - cparser.HPE_CB_HEADERS_COMPLETE, - cparser.HPE_CB_MESSAGE_COMPLETE, - cparser.HPE_CB_CHUNK_HEADER, - cparser.HPE_CB_CHUNK_COMPLETE, - cparser.HPE_INVALID_CONSTANT, - cparser.HPE_INVALID_HEADER_TOKEN, - cparser.HPE_INVALID_CONTENT_LENGTH, - cparser.HPE_INVALID_CHUNK_SIZE, - cparser.HPE_INVALID_EOF_STATE, - cparser.HPE_INVALID_TRANSFER_ENCODING}: - return BadHttpMessage(err_msg) - elif errno in {cparser.HPE_INVALID_STATUS, - cparser.HPE_INVALID_METHOD, - cparser.HPE_INVALID_VERSION}: - return BadStatusLine(error=err_msg) - elif errno == cparser.HPE_INVALID_URL: - return InvalidURLError(err_msg) - - return BadHttpMessage(err_msg) diff --git a/.venv/Lib/site-packages/aiohttp/_http_writer.cp311-win_amd64.pyd b/.venv/Lib/site-packages/aiohttp/_http_writer.cp311-win_amd64.pyd deleted file mode 100644 index 22b7ed7..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/_http_writer.cp311-win_amd64.pyd and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/_http_writer.pyx b/.venv/Lib/site-packages/aiohttp/_http_writer.pyx deleted file mode 100644 index eff8521..0000000 --- a/.venv/Lib/site-packages/aiohttp/_http_writer.pyx +++ /dev/null @@ -1,163 +0,0 @@ -from cpython.bytes cimport PyBytes_FromStringAndSize -from cpython.exc cimport PyErr_NoMemory -from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc -from cpython.object cimport PyObject_Str -from libc.stdint cimport uint8_t, uint64_t -from libc.string cimport memcpy - -from multidict import istr - -DEF BUF_SIZE = 16 * 1024 # 16KiB -cdef char BUFFER[BUF_SIZE] - -cdef object _istr = istr - - -# ----------------- writer --------------------------- - -cdef struct Writer: - char *buf - Py_ssize_t size - Py_ssize_t pos - - -cdef inline void _init_writer(Writer* writer): - writer.buf = &BUFFER[0] - writer.size = BUF_SIZE - writer.pos = 0 - - -cdef inline void _release_writer(Writer* writer): - if writer.buf != BUFFER: - PyMem_Free(writer.buf) - - -cdef inline int _write_byte(Writer* writer, uint8_t ch): - cdef char * buf - cdef Py_ssize_t size - - if writer.pos == writer.size: - # reallocate - size = writer.size + BUF_SIZE - if writer.buf == BUFFER: - buf = PyMem_Malloc(size) - if buf == NULL: - PyErr_NoMemory() - return -1 - memcpy(buf, writer.buf, writer.size) - else: - buf = PyMem_Realloc(writer.buf, size) - if buf == NULL: - PyErr_NoMemory() - return -1 - writer.buf = buf - writer.size = size - writer.buf[writer.pos] = ch - writer.pos += 1 - return 0 - - -cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): - cdef uint64_t utf = symbol - - if utf < 0x80: - return _write_byte(writer, utf) - elif utf < 0x800: - if _write_byte(writer, 
(0xc0 | (utf >> 6))) < 0: - return -1 - return _write_byte(writer, (0x80 | (utf & 0x3f))) - elif 0xD800 <= utf <= 0xDFFF: - # surogate pair, ignored - return 0 - elif utf < 0x10000: - if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: - return -1 - if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: - return -1 - return _write_byte(writer, (0x80 | (utf & 0x3f))) - elif utf > 0x10FFFF: - # symbol is too large - return 0 - else: - if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: - return -1 - if _write_byte(writer, - (0x80 | ((utf >> 12) & 0x3f))) < 0: - return -1 - if _write_byte(writer, - (0x80 | ((utf >> 6) & 0x3f))) < 0: - return -1 - return _write_byte(writer, (0x80 | (utf & 0x3f))) - - -cdef inline int _write_str(Writer* writer, str s): - cdef Py_UCS4 ch - for ch in s: - if _write_utf8(writer, ch) < 0: - return -1 - - -# --------------- _serialize_headers ---------------------- - -cdef str to_str(object s): - typ = type(s) - if typ is str: - return s - elif typ is _istr: - return PyObject_Str(s) - elif not isinstance(s, str): - raise TypeError("Cannot serialize non-str key {!r}".format(s)) - else: - return str(s) - - -cdef void _safe_header(str string) except *: - if "\r" in string or "\n" in string: - raise ValueError( - "Newline or carriage return character detected in HTTP status message or " - "header. This is a potential security issue." - ) - - -def _serialize_headers(str status_line, headers): - cdef Writer writer - cdef object key - cdef object val - cdef bytes ret - - _init_writer(&writer) - - for key, val in headers.items(): - _safe_header(to_str(key)) - _safe_header(to_str(val)) - - try: - if _write_str(&writer, status_line) < 0: - raise - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - for key, val in headers.items(): - if _write_str(&writer, to_str(key)) < 0: - raise - if _write_byte(&writer, b':') < 0: - raise - if _write_byte(&writer, b' ') < 0: - raise - if _write_str(&writer, to_str(val)) < 0: - raise - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - if _write_byte(&writer, b'\r') < 0: - raise - if _write_byte(&writer, b'\n') < 0: - raise - - return PyBytes_FromStringAndSize(writer.buf, writer.pos) - finally: - _release_writer(&writer) diff --git a/.venv/Lib/site-packages/aiohttp/_websocket.cp311-win_amd64.pyd b/.venv/Lib/site-packages/aiohttp/_websocket.cp311-win_amd64.pyd deleted file mode 100644 index 1480be2..0000000 Binary files a/.venv/Lib/site-packages/aiohttp/_websocket.cp311-win_amd64.pyd and /dev/null differ diff --git a/.venv/Lib/site-packages/aiohttp/_websocket.pyx b/.venv/Lib/site-packages/aiohttp/_websocket.pyx deleted file mode 100644 index 94318d2..0000000 --- a/.venv/Lib/site-packages/aiohttp/_websocket.pyx +++ /dev/null @@ -1,56 +0,0 @@ -from cpython cimport PyBytes_AsString - - -#from cpython cimport PyByteArray_AsString # cython still not exports that -cdef extern from "Python.h": - char* PyByteArray_AsString(bytearray ba) except NULL - -from libc.stdint cimport uint32_t, uint64_t, uintmax_t - - -def _websocket_mask_cython(object mask, object data): - """Note, this function mutates its `data` argument - """ - cdef: - Py_ssize_t data_len, i - # bit operations on signed integers are implementation-specific - unsigned char * in_buf - const unsigned char * mask_buf - uint32_t uint32_msk - uint64_t uint64_msk - - assert len(mask) == 4 - - if not isinstance(mask, bytes): - mask = bytes(mask) - - if isinstance(data, bytearray): - data = data - else: - data = 
bytearray(data) - - data_len = len(data) - in_buf = PyByteArray_AsString(data) - mask_buf = PyBytes_AsString(mask) - uint32_msk = (mask_buf)[0] - - # TODO: align in_data ptr to achieve even faster speeds - # does it need in python ?! malloc() always aligns to sizeof(long) bytes - - if sizeof(size_t) >= 8: - uint64_msk = uint32_msk - uint64_msk = (uint64_msk << 32) | uint32_msk - - while data_len >= 8: - (in_buf)[0] ^= uint64_msk - in_buf += 8 - data_len -= 8 - - - while data_len >= 4: - (in_buf)[0] ^= uint32_msk - in_buf += 4 - data_len -= 4 - - for i in range(0, data_len): - in_buf[i] ^= mask_buf[i] diff --git a/.venv/Lib/site-packages/aiohttp/abc.py b/.venv/Lib/site-packages/aiohttp/abc.py deleted file mode 100644 index ee83899..0000000 --- a/.venv/Lib/site-packages/aiohttp/abc.py +++ /dev/null @@ -1,209 +0,0 @@ -import asyncio -import logging -from abc import ABC, abstractmethod -from collections.abc import Sized -from http.cookies import BaseCookie, Morsel -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Dict, - Generator, - Iterable, - List, - Optional, - Tuple, -) - -from multidict import CIMultiDict -from yarl import URL - -from .helpers import get_running_loop -from .typedefs import LooseCookies - -if TYPE_CHECKING: - from .web_app import Application - from .web_exceptions import HTTPException - from .web_request import BaseRequest, Request - from .web_response import StreamResponse -else: - BaseRequest = Request = Application = StreamResponse = None - HTTPException = None - - -class AbstractRouter(ABC): - def __init__(self) -> None: - self._frozen = False - - def post_init(self, app: Application) -> None: - """Post init stage. - - Not an abstract method for sake of backward compatibility, - but if the router wants to be aware of the application - it can override this. - """ - - @property - def frozen(self) -> bool: - return self._frozen - - def freeze(self) -> None: - """Freeze router.""" - self._frozen = True - - @abstractmethod - async def resolve(self, request: Request) -> "AbstractMatchInfo": - """Return MATCH_INFO for given request""" - - -class AbstractMatchInfo(ABC): - @property # pragma: no branch - @abstractmethod - def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: - """Execute matched request handler""" - - @property - @abstractmethod - def expect_handler( - self, - ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]: - """Expect handler for 100-continue processing""" - - @property # pragma: no branch - @abstractmethod - def http_exception(self) -> Optional[HTTPException]: - """HTTPException instance raised on router's resolving, or None""" - - @abstractmethod # pragma: no branch - def get_info(self) -> Dict[str, Any]: - """Return a dict with additional info useful for introspection""" - - @property # pragma: no branch - @abstractmethod - def apps(self) -> Tuple[Application, ...]: - """Stack of nested applications. - - Top level application is left-most element. - - """ - - @abstractmethod - def add_app(self, app: Application) -> None: - """Add application to the nested apps stack.""" - - @abstractmethod - def freeze(self) -> None: - """Freeze the match info. - - The method is called after route resolution. - - After the call .add_app() is forbidden. 
- - """ - - -class AbstractView(ABC): - """Abstract class based view.""" - - def __init__(self, request: Request) -> None: - self._request = request - - @property - def request(self) -> Request: - """Request instance.""" - return self._request - - @abstractmethod - def __await__(self) -> Generator[Any, None, StreamResponse]: - """Execute the view handler.""" - - -class AbstractResolver(ABC): - """Abstract DNS resolver.""" - - @abstractmethod - async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: - """Return IP address for given hostname""" - - @abstractmethod - async def close(self) -> None: - """Release resolver""" - - -if TYPE_CHECKING: - IterableBase = Iterable[Morsel[str]] -else: - IterableBase = Iterable - - -ClearCookiePredicate = Callable[["Morsel[str]"], bool] - - -class AbstractCookieJar(Sized, IterableBase): - """Abstract Cookie Jar.""" - - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) - - @abstractmethod - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: - """Clear all cookies if no predicate is passed.""" - - @abstractmethod - def clear_domain(self, domain: str) -> None: - """Clear all cookies for domain and all subdomains.""" - - @abstractmethod - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - """Update cookies.""" - - @abstractmethod - def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": - """Return the jar's cookies filtered by their attributes.""" - - -class AbstractStreamWriter(ABC): - """Abstract stream writer.""" - - buffer_size = 0 - output_size = 0 - length: Optional[int] = 0 - - @abstractmethod - async def write(self, chunk: bytes) -> None: - """Write chunk into stream.""" - - @abstractmethod - async def write_eof(self, chunk: bytes = b"") -> None: - """Write last chunk.""" - - @abstractmethod - async def drain(self) -> None: - """Flush the write buffer.""" - - @abstractmethod - def enable_compression(self, encoding: str = "deflate") -> None: - """Enable HTTP body compression""" - - @abstractmethod - def enable_chunking(self) -> None: - """Enable HTTP chunked mode""" - - @abstractmethod - async def write_headers( - self, status_line: str, headers: "CIMultiDict[str]" - ) -> None: - """Write HTTP headers""" - - -class AbstractAccessLogger(ABC): - """Abstract writer to access log.""" - - def __init__(self, logger: logging.Logger, log_format: str) -> None: - self.logger = logger - self.log_format = log_format - - @abstractmethod - def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - """Emit log to logger.""" diff --git a/.venv/Lib/site-packages/aiohttp/base_protocol.py b/.venv/Lib/site-packages/aiohttp/base_protocol.py deleted file mode 100644 index 4c9f0a7..0000000 --- a/.venv/Lib/site-packages/aiohttp/base_protocol.py +++ /dev/null @@ -1,90 +0,0 @@ -import asyncio -from typing import Optional, cast - -from .tcp_helpers import tcp_nodelay - - -class BaseProtocol(asyncio.Protocol): - __slots__ = ( - "_loop", - "_paused", - "_drain_waiter", - "_connection_lost", - "_reading_paused", - "transport", - ) - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop: asyncio.AbstractEventLoop = loop - self._paused = False - self._drain_waiter: Optional[asyncio.Future[None]] = None - self._reading_paused = False - - self.transport: Optional[asyncio.Transport] = None - - @property - def connected(self) -> bool: - """Return True if the connection is 
open.""" - return self.transport is not None - - def pause_writing(self) -> None: - assert not self._paused - self._paused = True - - def resume_writing(self) -> None: - assert self._paused - self._paused = False - - waiter = self._drain_waiter - if waiter is not None: - self._drain_waiter = None - if not waiter.done(): - waiter.set_result(None) - - def pause_reading(self) -> None: - if not self._reading_paused and self.transport is not None: - try: - self.transport.pause_reading() - except (AttributeError, NotImplementedError, RuntimeError): - pass - self._reading_paused = True - - def resume_reading(self) -> None: - if self._reading_paused and self.transport is not None: - try: - self.transport.resume_reading() - except (AttributeError, NotImplementedError, RuntimeError): - pass - self._reading_paused = False - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - tr = cast(asyncio.Transport, transport) - tcp_nodelay(tr, True) - self.transport = tr - - def connection_lost(self, exc: Optional[BaseException]) -> None: - # Wake up the writer if currently paused. - self.transport = None - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - waiter.set_exception(exc) - - async def _drain_helper(self) -> None: - if not self.connected: - raise ConnectionResetError("Connection lost") - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - waiter = self._loop.create_future() - self._drain_waiter = waiter - await asyncio.shield(waiter) diff --git a/.venv/Lib/site-packages/aiohttp/client.py b/.venv/Lib/site-packages/aiohttp/client.py deleted file mode 100644 index 7e9b32f..0000000 --- a/.venv/Lib/site-packages/aiohttp/client.py +++ /dev/null @@ -1,1363 +0,0 @@ -"""HTTP Client for asyncio.""" - -import asyncio -import base64 -import hashlib -import json -import os -import sys -import traceback -import warnings -from contextlib import suppress -from types import SimpleNamespace, TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Coroutine, - Final, - FrozenSet, - Generator, - Generic, - Iterable, - List, - Mapping, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -import attr -from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr -from yarl import URL - -from . 
import hdrs, http, payload -from .abc import AbstractCookieJar -from .client_exceptions import ( - ClientConnectionError as ClientConnectionError, - ClientConnectorCertificateError as ClientConnectorCertificateError, - ClientConnectorError as ClientConnectorError, - ClientConnectorSSLError as ClientConnectorSSLError, - ClientError as ClientError, - ClientHttpProxyError as ClientHttpProxyError, - ClientOSError as ClientOSError, - ClientPayloadError as ClientPayloadError, - ClientProxyConnectionError as ClientProxyConnectionError, - ClientResponseError as ClientResponseError, - ClientSSLError as ClientSSLError, - ContentTypeError as ContentTypeError, - InvalidURL as InvalidURL, - ServerConnectionError as ServerConnectionError, - ServerDisconnectedError as ServerDisconnectedError, - ServerFingerprintMismatch as ServerFingerprintMismatch, - ServerTimeoutError as ServerTimeoutError, - TooManyRedirects as TooManyRedirects, - WSServerHandshakeError as WSServerHandshakeError, -) -from .client_reqrep import ( - ClientRequest as ClientRequest, - ClientResponse as ClientResponse, - Fingerprint as Fingerprint, - RequestInfo as RequestInfo, - _merge_ssl_params, -) -from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse -from .connector import ( - BaseConnector as BaseConnector, - NamedPipeConnector as NamedPipeConnector, - TCPConnector as TCPConnector, - UnixConnector as UnixConnector, -) -from .cookiejar import CookieJar -from .helpers import ( - _SENTINEL, - DEBUG, - BasicAuth, - TimeoutHandle, - ceil_timeout, - get_env_proxy_for_url, - get_running_loop, - method_must_be_empty_body, - sentinel, - strip_auth_from_url, -) -from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter -from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse -from .streams import FlowControlDataQueue -from .tracing import Trace, TraceConfig -from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL - -__all__ = ( - # client_exceptions - "ClientConnectionError", - "ClientConnectorCertificateError", - "ClientConnectorError", - "ClientConnectorSSLError", - "ClientError", - "ClientHttpProxyError", - "ClientOSError", - "ClientPayloadError", - "ClientProxyConnectionError", - "ClientResponseError", - "ClientSSLError", - "ContentTypeError", - "InvalidURL", - "ServerConnectionError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ServerTimeoutError", - "TooManyRedirects", - "WSServerHandshakeError", - # client_reqrep - "ClientRequest", - "ClientResponse", - "Fingerprint", - "RequestInfo", - # connector - "BaseConnector", - "TCPConnector", - "UnixConnector", - "NamedPipeConnector", - # client_ws - "ClientWebSocketResponse", - # client - "ClientSession", - "ClientTimeout", - "request", -) - - -if TYPE_CHECKING: - from ssl import SSLContext -else: - SSLContext = None - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ClientTimeout: - total: Optional[float] = None - connect: Optional[float] = None - sock_read: Optional[float] = None - sock_connect: Optional[float] = None - ceil_threshold: float = 5 - - # pool_queue_timeout: Optional[float] = None - # dns_resolution_timeout: Optional[float] = None - # socket_connect_timeout: Optional[float] = None - # connection_acquiring_timeout: Optional[float] = None - # new_connection_timeout: Optional[float] = None - # http_header_timeout: Optional[float] = None - # response_body_timeout: Optional[float] = None - - # to create a timeout specific for a single request, either - # - create a completely 
new one to overwrite the default - # - or use http://www.attrs.org/en/stable/api.html#attr.evolve - # to overwrite the defaults - - -# 5 Minute default read timeout -DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60) - -_RetType = TypeVar("_RetType") -_CharsetResolver = Callable[[ClientResponse, bytes], str] - - -class ClientSession: - """First-class interface for making HTTP requests.""" - - ATTRS = frozenset( - [ - "_base_url", - "_source_traceback", - "_connector", - "requote_redirect_url", - "_loop", - "_cookie_jar", - "_connector_owner", - "_default_auth", - "_version", - "_json_serialize", - "_requote_redirect_url", - "_timeout", - "_raise_for_status", - "_auto_decompress", - "_trust_env", - "_default_headers", - "_skip_auto_headers", - "_request_class", - "_response_class", - "_ws_response_class", - "_trace_configs", - "_read_bufsize", - "_max_line_size", - "_max_field_size", - "_resolve_charset", - ] - ) - - _source_traceback: Optional[traceback.StackSummary] = None - _connector: Optional[BaseConnector] = None - - def __init__( - self, - base_url: Optional[StrOrURL] = None, - *, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - json_serialize: JSONEncoder = json.dumps, - request_class: Type[ClientRequest] = ClientRequest, - response_class: Type[ClientResponse] = ClientResponse, - ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, - version: HttpVersion = http.HttpVersion11, - cookie_jar: Optional[AbstractCookieJar] = None, - connector_owner: bool = True, - raise_for_status: Union[ - bool, Callable[[ClientResponse], Awaitable[None]] - ] = False, - read_timeout: Union[float, _SENTINEL] = sentinel, - conn_timeout: Optional[float] = None, - timeout: Union[object, ClientTimeout] = sentinel, - auto_decompress: bool = True, - trust_env: bool = False, - requote_redirect_url: bool = True, - trace_configs: Optional[List[TraceConfig]] = None, - read_bufsize: int = 2**16, - max_line_size: int = 8190, - max_field_size: int = 8190, - fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", - ) -> None: - if loop is None: - if connector is not None: - loop = connector._loop - - loop = get_running_loop(loop) - - if base_url is None or isinstance(base_url, URL): - self._base_url: Optional[URL] = base_url - else: - self._base_url = URL(base_url) - assert ( - self._base_url.origin() == self._base_url - ), "Only absolute URLs without path part are supported" - - if connector is None: - connector = TCPConnector(loop=loop) - - if connector._loop is not loop: - raise RuntimeError("Session and connector has to use same event loop") - - self._loop = loop - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - if cookie_jar is None: - cookie_jar = CookieJar(loop=loop) - self._cookie_jar = cookie_jar - - if cookies is not None: - self._cookie_jar.update_cookies(cookies) - - self._connector = connector - self._connector_owner = connector_owner - self._default_auth = auth - self._version = version - self._json_serialize = json_serialize - if timeout is sentinel or timeout is None: - self._timeout = DEFAULT_TIMEOUT - if read_timeout is not sentinel: - warnings.warn( - "read_timeout is deprecated, " "use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - self._timeout = 
attr.evolve(self._timeout, total=read_timeout) - if conn_timeout is not None: - self._timeout = attr.evolve(self._timeout, connect=conn_timeout) - warnings.warn( - "conn_timeout is deprecated, " "use timeout argument instead", - DeprecationWarning, - stacklevel=2, - ) - else: - if not isinstance(timeout, ClientTimeout): - raise ValueError( - f"timeout parameter cannot be of {type(timeout)} type, " - "please use 'timeout=ClientTimeout(...)'", - ) - self._timeout = timeout - if read_timeout is not sentinel: - raise ValueError( - "read_timeout and timeout parameters " - "conflict, please setup " - "timeout.read" - ) - if conn_timeout is not None: - raise ValueError( - "conn_timeout and timeout parameters " - "conflict, please setup " - "timeout.connect" - ) - self._raise_for_status = raise_for_status - self._auto_decompress = auto_decompress - self._trust_env = trust_env - self._requote_redirect_url = requote_redirect_url - self._read_bufsize = read_bufsize - self._max_line_size = max_line_size - self._max_field_size = max_field_size - - # Convert to list of tuples - if headers: - real_headers: CIMultiDict[str] = CIMultiDict(headers) - else: - real_headers = CIMultiDict() - self._default_headers: CIMultiDict[str] = real_headers - if skip_auto_headers is not None: - self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) - else: - self._skip_auto_headers = frozenset() - - self._request_class = request_class - self._response_class = response_class - self._ws_response_class = ws_response_class - - self._trace_configs = trace_configs or [] - for trace_config in self._trace_configs: - trace_config.freeze() - - self._resolve_charset = fallback_charset_resolver - - def __init_subclass__(cls: Type["ClientSession"]) -> None: - warnings.warn( - "Inheritance class {} from ClientSession " - "is discouraged".format(cls.__name__), - DeprecationWarning, - stacklevel=2, - ) - - if DEBUG: - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom ClientSession.{} attribute " - "is discouraged".format(name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - def __del__(self, _warnings: Any = warnings) -> None: - if not self.closed: - kwargs = {"source": self} - _warnings.warn( - f"Unclosed client session {self!r}", ResourceWarning, **kwargs - ) - context = {"client_session": self, "message": "Unclosed client session"} - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def request( - self, method: str, url: StrOrURL, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP request.""" - return _RequestContextManager(self._request(method, url, **kwargs)) - - def _build_url(self, str_or_url: StrOrURL) -> URL: - url = URL(str_or_url) - if self._base_url is None: - return url - else: - assert not url.is_absolute() and url.path.startswith("/") - return self._base_url.join(url) - - async def _request( - self, - method: str, - str_or_url: StrOrURL, - *, - params: Optional[Mapping[str, str]] = None, - data: Any = None, - json: Any = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Union[ - None, bool, 
Callable[[ClientResponse], Awaitable[None]] - ] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, _SENTINEL] = sentinel, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[SimpleNamespace] = None, - read_bufsize: Optional[int] = None, - auto_decompress: Optional[bool] = None, - max_line_size: Optional[int] = None, - max_field_size: Optional[int] = None, - ) -> ClientResponse: - - # NOTE: timeout clamps existing connect and read timeouts. We cannot - # set the default to None because we need to detect if the user wants - # to use the existing timeouts by setting timeout to None. - - if self.closed: - raise RuntimeError("Session is closed") - - ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - - if data is not None and json is not None: - raise ValueError( - "data and json parameters can not be used at the same time" - ) - elif json is not None: - data = payload.JsonPayload(json, dumps=self._json_serialize) - - if not isinstance(chunked, bool) and chunked is not None: - warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) - - redirects = 0 - history = [] - version = self._version - params = params or {} - - # Merge with default headers and transform to CIMultiDict - headers = self._prepare_headers(headers) - proxy_headers = self._prepare_headers(proxy_headers) - - try: - url = self._build_url(str_or_url) - except ValueError as e: - raise InvalidURL(str_or_url) from e - - skip_headers = set(self._skip_auto_headers) - if skip_auto_headers is not None: - for i in skip_auto_headers: - skip_headers.add(istr(i)) - - if proxy is not None: - try: - proxy = URL(proxy) - except ValueError as e: - raise InvalidURL(proxy) from e - - if timeout is sentinel: - real_timeout: ClientTimeout = self._timeout - else: - if not isinstance(timeout, ClientTimeout): - real_timeout = ClientTimeout(total=timeout) - else: - real_timeout = timeout - # timeout is cumulative for all request operations - # (request, redirects, responses, data consuming) - tm = TimeoutHandle( - self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold - ) - handle = tm.start() - - if read_bufsize is None: - read_bufsize = self._read_bufsize - - if auto_decompress is None: - auto_decompress = self._auto_decompress - - if max_line_size is None: - max_line_size = self._max_line_size - - if max_field_size is None: - max_field_size = self._max_field_size - - traces = [ - Trace( - self, - trace_config, - trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), - ) - for trace_config in self._trace_configs - ] - - for trace in traces: - await trace.send_request_start(method, url.update_query(params), headers) - - timer = tm.timer() - try: - with timer: - while True: - url, auth_from_url = strip_auth_from_url(url) - if auth and auth_from_url: - raise ValueError( - "Cannot combine AUTH argument with " - "credentials encoded in URL" - ) - - if auth is None: - auth = auth_from_url - if auth is None: - auth = self._default_auth - # It would be confusing if we support explicit - # Authorization header with auth argument - if ( - headers is not None - and auth is not None - and hdrs.AUTHORIZATION in headers - ): - raise ValueError( - "Cannot combine 
AUTHORIZATION header " - "with AUTH argument or credentials " - "encoded in URL" - ) - - all_cookies = self._cookie_jar.filter_cookies(url) - - if cookies is not None: - tmp_cookie_jar = CookieJar() - tmp_cookie_jar.update_cookies(cookies) - req_cookies = tmp_cookie_jar.filter_cookies(url) - if req_cookies: - all_cookies.load(req_cookies) - - if proxy is not None: - proxy = URL(proxy) - elif self._trust_env: - with suppress(LookupError): - proxy, proxy_auth = get_env_proxy_for_url(url) - - req = self._request_class( - method, - url, - params=params, - headers=headers, - skip_auto_headers=skip_headers, - data=data, - cookies=all_cookies, - auth=auth, - version=version, - compress=compress, - chunked=chunked, - expect100=expect100, - loop=self._loop, - response_class=self._response_class, - proxy=proxy, - proxy_auth=proxy_auth, - timer=timer, - session=self, - ssl=ssl if ssl is not None else True, - server_hostname=server_hostname, - proxy_headers=proxy_headers, - traces=traces, - trust_env=self.trust_env, - ) - - # connection timeout - try: - async with ceil_timeout( - real_timeout.connect, - ceil_threshold=real_timeout.ceil_threshold, - ): - assert self._connector is not None - conn = await self._connector.connect( - req, traces=traces, timeout=real_timeout - ) - except asyncio.TimeoutError as exc: - raise ServerTimeoutError( - "Connection timeout " "to host {}".format(url) - ) from exc - - assert conn.transport is not None - - assert conn.protocol is not None - conn.protocol.set_response_params( - timer=timer, - skip_payload=method_must_be_empty_body(method), - read_until_eof=read_until_eof, - auto_decompress=auto_decompress, - read_timeout=real_timeout.sock_read, - read_bufsize=read_bufsize, - timeout_ceil_threshold=self._connector._timeout_ceil_threshold, - max_line_size=max_line_size, - max_field_size=max_field_size, - ) - - try: - try: - resp = await req.send(conn) - try: - await resp.start(conn) - except BaseException: - resp.close() - raise - except BaseException: - conn.close() - raise - except ClientError: - raise - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - raise ClientOSError(*exc.args) from exc - - self._cookie_jar.update_cookies(resp.cookies, resp.url) - - # redirects - if resp.status in (301, 302, 303, 307, 308) and allow_redirects: - - for trace in traces: - await trace.send_request_redirect( - method, url.update_query(params), headers, resp - ) - - redirects += 1 - history.append(resp) - if max_redirects and redirects >= max_redirects: - resp.close() - raise TooManyRedirects( - history[0].request_info, tuple(history) - ) - - # For 301 and 302, mimic IE, now changed in RFC - # https://github.com/kennethreitz/requests/pull/269 - if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( - resp.status in (301, 302) and resp.method == hdrs.METH_POST - ): - method = hdrs.METH_GET - data = None - if headers.get(hdrs.CONTENT_LENGTH): - headers.pop(hdrs.CONTENT_LENGTH) - - r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( - hdrs.URI - ) - if r_url is None: - # see github.com/aio-libs/aiohttp/issues/2022 - break - else: - # reading from correct redirection - # response is forbidden - resp.release() - - try: - parsed_url = URL( - r_url, encoded=not self._requote_redirect_url - ) - - except ValueError as e: - raise InvalidURL(r_url) from e - - scheme = parsed_url.scheme - if scheme not in ("http", "https", ""): - resp.close() - raise ValueError("Can redirect only to http or https") - elif not scheme: - parsed_url = 
url.join(parsed_url) - - if url.origin() != parsed_url.origin(): - auth = None - headers.pop(hdrs.AUTHORIZATION, None) - - url = parsed_url - params = {} - resp.release() - continue - - break - - # check response status - if raise_for_status is None: - raise_for_status = self._raise_for_status - - if raise_for_status is None: - pass - elif callable(raise_for_status): - await raise_for_status(resp) - elif raise_for_status: - resp.raise_for_status() - - # register connection - if handle is not None: - if resp.connection is not None: - resp.connection.add_callback(handle.cancel) - else: - handle.cancel() - - resp._history = tuple(history) - - for trace in traces: - await trace.send_request_end( - method, url.update_query(params), headers, resp - ) - return resp - - except BaseException as e: - # cleanup timer - tm.close() - if handle: - handle.cancel() - handle = None - - for trace in traces: - await trace.send_request_exception( - method, url.update_query(params), headers, e - ) - raise - - def ws_connect( - self, - url: StrOrURL, - *, - method: str = hdrs.METH_GET, - protocols: Iterable[str] = (), - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, None, Fingerprint] = True, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - proxy_headers: Optional[LooseHeaders] = None, - compress: int = 0, - max_msg_size: int = 4 * 1024 * 1024, - ) -> "_WSRequestContextManager": - """Initiate websocket connection.""" - return _WSRequestContextManager( - self._ws_connect( - url, - method=method, - protocols=protocols, - timeout=timeout, - receive_timeout=receive_timeout, - autoclose=autoclose, - autoping=autoping, - heartbeat=heartbeat, - auth=auth, - origin=origin, - params=params, - headers=headers, - proxy=proxy, - proxy_auth=proxy_auth, - ssl=ssl, - verify_ssl=verify_ssl, - fingerprint=fingerprint, - ssl_context=ssl_context, - proxy_headers=proxy_headers, - compress=compress, - max_msg_size=max_msg_size, - ) - ) - - async def _ws_connect( - self, - url: StrOrURL, - *, - method: str = hdrs.METH_GET, - protocols: Iterable[str] = (), - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, - params: Optional[Mapping[str, str]] = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - proxy_headers: Optional[LooseHeaders] = None, - compress: int = 0, - max_msg_size: int = 4 * 1024 * 1024, - ) -> ClientWebSocketResponse: - - if headers is None: - real_headers: CIMultiDict[str] = CIMultiDict() - else: - real_headers = CIMultiDict(headers) - - default_headers = { - hdrs.UPGRADE: "websocket", - hdrs.CONNECTION: "Upgrade", - hdrs.SEC_WEBSOCKET_VERSION: "13", - } - - for key, value in default_headers.items(): - real_headers.setdefault(key, value) - - sec_key = 
base64.b64encode(os.urandom(16)) - real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() - - if protocols: - real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) - if origin is not None: - real_headers[hdrs.ORIGIN] = origin - if compress: - extstr = ws_ext_gen(compress=compress) - real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr - - # For the sake of backward compatibility, if user passes in None, convert it to True - if ssl is None: - ssl = True - ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - - # send request - resp = await self.request( - method, - url, - params=params, - headers=real_headers, - read_until_eof=False, - auth=auth, - proxy=proxy, - proxy_auth=proxy_auth, - ssl=ssl, - proxy_headers=proxy_headers, - ) - - try: - # check handshake - if resp.status != 101: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid response status", - status=resp.status, - headers=resp.headers, - ) - - if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid upgrade header", - status=resp.status, - headers=resp.headers, - ) - - if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid connection header", - status=resp.status, - headers=resp.headers, - ) - - # key calculation - r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") - match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() - if r_key != match: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message="Invalid challenge response", - status=resp.status, - headers=resp.headers, - ) - - # websocket protocol - protocol = None - if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: - resp_protocols = [ - proto.strip() - for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") - ] - - for proto in resp_protocols: - if proto in protocols: - protocol = proto - break - - # websocket compress - notakeover = False - if compress: - compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) - if compress_hdrs: - try: - compress, notakeover = ws_ext_parse(compress_hdrs) - except WSHandshakeError as exc: - raise WSServerHandshakeError( - resp.request_info, - resp.history, - message=exc.args[0], - status=resp.status, - headers=resp.headers, - ) from exc - else: - compress = 0 - notakeover = False - - conn = resp.connection - assert conn is not None - conn_proto = conn.protocol - assert conn_proto is not None - transport = conn.transport - assert transport is not None - reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue( - conn_proto, 2**16, loop=self._loop - ) - conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) - writer = WebSocketWriter( - conn_proto, - transport, - use_mask=True, - compress=compress, - notakeover=notakeover, - ) - except BaseException: - resp.close() - raise - else: - return self._ws_response_class( - reader, - writer, - protocol, - resp, - timeout, - autoclose, - autoping, - self._loop, - receive_timeout=receive_timeout, - heartbeat=heartbeat, - compress=compress, - client_notakeover=notakeover, - ) - - def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": - """Add default headers and transform it to CIMultiDict""" - # Convert headers to MultiDict - result = CIMultiDict(self._default_headers) - if headers: - if not isinstance(headers, (MultiDictProxy, 
MultiDict)): - headers = CIMultiDict(headers) - added_names: Set[str] = set() - for key, value in headers.items(): - if key in added_names: - result.add(key, value) - else: - result[key] = value - added_names.add(key) - return result - - def get( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP GET request.""" - return _RequestContextManager( - self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs) - ) - - def options( - self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP OPTIONS request.""" - return _RequestContextManager( - self._request( - hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - def head( - self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP HEAD request.""" - return _RequestContextManager( - self._request( - hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs - ) - ) - - def post( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP POST request.""" - return _RequestContextManager( - self._request(hdrs.METH_POST, url, data=data, **kwargs) - ) - - def put( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PUT request.""" - return _RequestContextManager( - self._request(hdrs.METH_PUT, url, data=data, **kwargs) - ) - - def patch( - self, url: StrOrURL, *, data: Any = None, **kwargs: Any - ) -> "_RequestContextManager": - """Perform HTTP PATCH request.""" - return _RequestContextManager( - self._request(hdrs.METH_PATCH, url, data=data, **kwargs) - ) - - def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": - """Perform HTTP DELETE request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs)) - - async def close(self) -> None: - """Close underlying connector. - - Release all acquired resources. - """ - if not self.closed: - if self._connector is not None and self._connector_owner: - await self._connector.close() - self._connector = None - - @property - def closed(self) -> bool: - """Is client session closed. - - A readonly property. 
- """ - return self._connector is None or self._connector.closed - - @property - def connector(self) -> Optional[BaseConnector]: - """Connector instance used for the session.""" - return self._connector - - @property - def cookie_jar(self) -> AbstractCookieJar: - """The session cookies.""" - return self._cookie_jar - - @property - def version(self) -> Tuple[int, int]: - """The session HTTP protocol version.""" - return self._version - - @property - def requote_redirect_url(self) -> bool: - """Do URL requoting on redirection handling.""" - return self._requote_redirect_url - - @requote_redirect_url.setter - def requote_redirect_url(self, val: bool) -> None: - """Do URL requoting on redirection handling.""" - warnings.warn( - "session.requote_redirect_url modification " "is deprecated #2778", - DeprecationWarning, - stacklevel=2, - ) - self._requote_redirect_url = val - - @property - def loop(self) -> asyncio.AbstractEventLoop: - """Session's loop.""" - warnings.warn( - "client.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - @property - def timeout(self) -> ClientTimeout: - """Timeout for the session.""" - return self._timeout - - @property - def headers(self) -> "CIMultiDict[str]": - """The default headers of the client session.""" - return self._default_headers - - @property - def skip_auto_headers(self) -> FrozenSet[istr]: - """Headers for which autogeneration should be skipped""" - return self._skip_auto_headers - - @property - def auth(self) -> Optional[BasicAuth]: - """An object that represents HTTP Basic Authorization""" - return self._default_auth - - @property - def json_serialize(self) -> JSONEncoder: - """Json serializer callable""" - return self._json_serialize - - @property - def connector_owner(self) -> bool: - """Should connector be closed on session closing""" - return self._connector_owner - - @property - def raise_for_status( - self, - ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: - """Should `ClientResponse.raise_for_status()` be called for each response.""" - return self._raise_for_status - - @property - def auto_decompress(self) -> bool: - """Should the body response be automatically decompressed.""" - return self._auto_decompress - - @property - def trust_env(self) -> bool: - """ - Should proxies information from environment or netrc be trusted. - - Information is from HTTP_PROXY / HTTPS_PROXY environment variables - or ~/.netrc file if present. - """ - return self._trust_env - - @property - def trace_configs(self) -> List[TraceConfig]: - """A list of TraceConfig instances used for client tracing""" - return self._trace_configs - - def detach(self) -> None: - """Detach connector from session without closing the former. - - Session is switched to closed state anyway. 
- """ - self._connector = None - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "ClientSession": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - await self.close() - - -class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): - - __slots__ = ("_coro", "_resp") - - def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro = coro - - def send(self, arg: None) -> "asyncio.Future[Any]": - return self._coro.send(arg) - - def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]": - return self._coro.throw(*args, **kwargs) - - def close(self) -> None: - return self._coro.close() - - def __await__(self) -> Generator[Any, None, _RetType]: - ret = self._coro.__await__() - return ret - - def __iter__(self) -> Generator[Any, None, _RetType]: - return self.__await__() - - async def __aenter__(self) -> _RetType: - self._resp = await self._coro - return self._resp - - -class _RequestContextManager(_BaseRequestContextManager[ClientResponse]): - __slots__ = () - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - # We're basing behavior on the exception as it can be caused by - # user code unrelated to the status of the connection. If you - # would like to close a connection you must do that - # explicitly. Otherwise connection error handling should kick in - # and close/recycle the connection as required. 
- self._resp.release() - await self._resp.wait_for_close() - - -class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]): - __slots__ = () - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self._resp.close() - - -class _SessionRequestContextManager: - - __slots__ = ("_coro", "_resp", "_session") - - def __init__( - self, - coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], - session: ClientSession, - ) -> None: - self._coro = coro - self._resp: Optional[ClientResponse] = None - self._session = session - - async def __aenter__(self) -> ClientResponse: - try: - self._resp = await self._coro - except BaseException: - await self._session.close() - raise - else: - return self._resp - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - assert self._resp is not None - self._resp.close() - await self._session.close() - - -def request( - method: str, - url: StrOrURL, - *, - params: Optional[Mapping[str, str]] = None, - data: Any = None, - json: Any = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, - allow_redirects: bool = True, - max_redirects: int = 10, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - raise_for_status: Optional[bool] = None, - read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, object] = sentinel, - cookies: Optional[LooseCookies] = None, - version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - read_bufsize: Optional[int] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - max_line_size: int = 8190, - max_field_size: int = 8190, -) -> _SessionRequestContextManager: - """Constructs and sends a request. - - Returns response object. - method - HTTP method - url - request url - params - (optional) Dictionary or bytes to be sent in the query - string of the new request - data - (optional) Dictionary, bytes, or file-like object to - send in the body of the request - json - (optional) Any json compatible python object - headers - (optional) Dictionary of HTTP Headers to send with - the request - cookies - (optional) Dict object to send with the request - auth - (optional) BasicAuth named tuple represent HTTP Basic Auth - auth - aiohttp.helpers.BasicAuth - allow_redirects - (optional) If set to False, do not follow - redirects - version - Request HTTP version. - compress - Set to True if request has to be compressed - with deflate encoding. - chunked - Set to chunk size for chunked transfer encoding. - expect100 - Expect 100-continue response from server. - connector - BaseConnector sub-class instance to support - connection pooling. - read_until_eof - Read response until eof if response - does not have Content-Length header. - loop - Optional event loop. - timeout - Optional ClientTimeout settings structure, 5min - total timeout by default. 
Usage:: - >>> import aiohttp - >>> resp = await aiohttp.request('GET', 'http://python.org/') - >>> resp - <ClientResponse(python.org/) [200]> - >>> data = await resp.read() - """ - connector_owner = False - if connector is None: - connector_owner = True - connector = TCPConnector(loop=loop, force_close=True) - - session = ClientSession( - loop=loop, - cookies=cookies, - version=version, - timeout=timeout, - connector=connector, - connector_owner=connector_owner, - ) - - return _SessionRequestContextManager( - session._request( - method, - url, - params=params, - data=data, - json=json, - headers=headers, - skip_auto_headers=skip_auto_headers, - auth=auth, - allow_redirects=allow_redirects, - max_redirects=max_redirects, - compress=compress, - chunked=chunked, - expect100=expect100, - raise_for_status=raise_for_status, - read_until_eof=read_until_eof, - proxy=proxy, - proxy_auth=proxy_auth, - read_bufsize=read_bufsize, - max_line_size=max_line_size, - max_field_size=max_field_size, - ), - session, - ) diff --git a/.venv/Lib/site-packages/aiohttp/client_exceptions.py b/.venv/Lib/site-packages/aiohttp/client_exceptions.py deleted file mode 100644 index 9b6e442..0000000 --- a/.venv/Lib/site-packages/aiohttp/client_exceptions.py +++ /dev/null @@ -1,346 +0,0 @@ -"""HTTP related errors.""" - -import asyncio -import warnings -from typing import TYPE_CHECKING, Any, Optional, Tuple, Union - -from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders - -try: - import ssl - - SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = SSLContext = None # type: ignore[assignment] - - -if TYPE_CHECKING: - from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo -else: - RequestInfo = ClientResponse = ConnectionKey = None - -__all__ = ( - "ClientError", - "ClientConnectionError", - "ClientOSError", - "ClientConnectorError", - "ClientProxyConnectionError", - "ClientSSLError", - "ClientConnectorSSLError", - "ClientConnectorCertificateError", - "ServerConnectionError", - "ServerTimeoutError", - "ServerDisconnectedError", - "ServerFingerprintMismatch", - "ClientResponseError", - "ClientHttpProxyError", - "WSServerHandshakeError", - "ContentTypeError", - "ClientPayloadError", - "InvalidURL", -) - - -class ClientError(Exception): - """Base class for client connection errors.""" - - -class ClientResponseError(ClientError): - """Base class for exceptions that occur after getting a response. - - request_info: An instance of RequestInfo. - history: A sequence of responses, if redirects occurred. - status: HTTP status code. - message: Error message. - headers: Response headers.
- """ - - def __init__( - self, - request_info: RequestInfo, - history: Tuple[ClientResponse, ...], - *, - code: Optional[int] = None, - status: Optional[int] = None, - message: str = "", - headers: Optional[LooseHeaders] = None, - ) -> None: - self.request_info = request_info - if code is not None: - if status is not None: - raise ValueError( - "Both code and status arguments are provided; " - "code is deprecated, use status instead" - ) - warnings.warn( - "code argument is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - if status is not None: - self.status = status - elif code is not None: - self.status = code - else: - self.status = 0 - self.message = message - self.headers = headers - self.history = history - self.args = (request_info, history) - - def __str__(self) -> str: - return "{}, message={!r}, url={!r}".format( - self.status, - self.message, - self.request_info.real_url, - ) - - def __repr__(self) -> str: - args = f"{self.request_info!r}, {self.history!r}" - if self.status != 0: - args += f", status={self.status!r}" - if self.message != "": - args += f", message={self.message!r}" - if self.headers is not None: - args += f", headers={self.headers!r}" - return f"{type(self).__name__}({args})" - - @property - def code(self) -> int: - warnings.warn( - "code property is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - return self.status - - @code.setter - def code(self, value: int) -> None: - warnings.warn( - "code property is deprecated, use status instead", - DeprecationWarning, - stacklevel=2, - ) - self.status = value - - -class ContentTypeError(ClientResponseError): - """ContentType found is not valid.""" - - -class WSServerHandshakeError(ClientResponseError): - """websocket server handshake error.""" - - -class ClientHttpProxyError(ClientResponseError): - """HTTP proxy error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - proxy responds with status other than ``200 OK`` - on ``CONNECT`` request. - """ - - -class TooManyRedirects(ClientResponseError): - """Client was redirected too many times.""" - - -class ClientConnectionError(ClientError): - """Base class for client socket errors.""" - - -class ClientOSError(ClientConnectionError, OSError): - """OSError error.""" - - -class ClientConnectorError(ClientOSError): - """Client connector error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - a connection can not be established. - """ - - def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: - self._conn_key = connection_key - self._os_error = os_error - super().__init__(os_error.errno, os_error.strerror) - self.args = (connection_key, os_error) - - @property - def os_error(self) -> OSError: - return self._os_error - - @property - def host(self) -> str: - return self._conn_key.host - - @property - def port(self) -> Optional[int]: - return self._conn_key.port - - @property - def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]: - return self._conn_key.ssl - - def __str__(self) -> str: - return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( - self, "default" if self.ssl is True else self.ssl, self.strerror - ) - - # OSError.__reduce__ does too much black magick - __reduce__ = BaseException.__reduce__ - - -class ClientProxyConnectionError(ClientConnectorError): - """Proxy connection error. - - Raised in :class:`aiohttp.connector.TCPConnector` if - connection to proxy can not be established. 
- """ - - -class UnixClientConnectorError(ClientConnectorError): - """Unix connector error. - - Raised in :py:class:`aiohttp.connector.UnixConnector` - if connection to unix socket can not be established. - """ - - def __init__( - self, path: str, connection_key: ConnectionKey, os_error: OSError - ) -> None: - self._path = path - super().__init__(connection_key, os_error) - - @property - def path(self) -> str: - return self._path - - def __str__(self) -> str: - return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( - self, "default" if self.ssl is True else self.ssl, self.strerror - ) - - -class ServerConnectionError(ClientConnectionError): - """Server connection errors.""" - - -class ServerDisconnectedError(ServerConnectionError): - """Server disconnected.""" - - def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: - if message is None: - message = "Server disconnected" - - self.args = (message,) - self.message = message - - -class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): - """Server timeout error.""" - - -class ServerFingerprintMismatch(ServerConnectionError): - """SSL certificate does not match expected fingerprint.""" - - def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: - self.expected = expected - self.got = got - self.host = host - self.port = port - self.args = (expected, got, host, port) - - def __repr__(self) -> str: - return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( - self.__class__.__name__, self.expected, self.got, self.host, self.port - ) - - -class ClientPayloadError(ClientError): - """Response payload error.""" - - -class InvalidURL(ClientError, ValueError): - """Invalid URL. - - URL used for fetching is malformed, e.g. it doesn't contains host - part. 
- """ - - # Derive from ValueError for backward compatibility - - def __init__(self, url: Any) -> None: - # The type of url is not yarl.URL because the exception can be raised - # on URL(url) call - super().__init__(url) - - @property - def url(self) -> Any: - return self.args[0] - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.url}>" - - -class ClientSSLError(ClientConnectorError): - """Base error for ssl.*Errors.""" - - -if ssl is not None: - cert_errors = (ssl.CertificateError,) - cert_errors_bases = ( - ClientSSLError, - ssl.CertificateError, - ) - - ssl_errors = (ssl.SSLError,) - ssl_error_bases = (ClientSSLError, ssl.SSLError) -else: # pragma: no cover - cert_errors = tuple() - cert_errors_bases = ( - ClientSSLError, - ValueError, - ) - - ssl_errors = tuple() - ssl_error_bases = (ClientSSLError,) - - -class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] - """Response ssl error.""" - - -class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] - """Response certificate error.""" - - def __init__( - self, connection_key: ConnectionKey, certificate_error: Exception - ) -> None: - self._conn_key = connection_key - self._certificate_error = certificate_error - self.args = (connection_key, certificate_error) - - @property - def certificate_error(self) -> Exception: - return self._certificate_error - - @property - def host(self) -> str: - return self._conn_key.host - - @property - def port(self) -> Optional[int]: - return self._conn_key.port - - @property - def ssl(self) -> bool: - return self._conn_key.is_ssl - - def __str__(self) -> str: - return ( - "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " - "[{0.certificate_error.__class__.__name__}: " - "{0.certificate_error.args}]".format(self) - ) diff --git a/.venv/Lib/site-packages/aiohttp/client_proto.py b/.venv/Lib/site-packages/aiohttp/client_proto.py deleted file mode 100644 index ca99808..0000000 --- a/.venv/Lib/site-packages/aiohttp/client_proto.py +++ /dev/null @@ -1,264 +0,0 @@ -import asyncio -from contextlib import suppress -from typing import Any, Optional, Tuple - -from .base_protocol import BaseProtocol -from .client_exceptions import ( - ClientOSError, - ClientPayloadError, - ServerDisconnectedError, - ServerTimeoutError, -) -from .helpers import BaseTimerContext, status_code_must_be_empty_body -from .http import HttpResponseParser, RawResponseMessage -from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader - - -class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): - """Helper class to adapt between Protocol and StreamReader.""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - BaseProtocol.__init__(self, loop=loop) - DataQueue.__init__(self, loop) - - self._should_close = False - - self._payload: Optional[StreamReader] = None - self._skip_payload = False - self._payload_parser = None - - self._timer = None - - self._tail = b"" - self._upgraded = False - self._parser: Optional[HttpResponseParser] = None - - self._read_timeout: Optional[float] = None - self._read_timeout_handle: Optional[asyncio.TimerHandle] = None - - self._timeout_ceil_threshold: Optional[float] = 5 - - @property - def upgraded(self) -> bool: - return self._upgraded - - @property - def should_close(self) -> bool: - if self._payload is not None and not self._payload.is_eof() or self._upgraded: - return True - - return ( - self._should_close - or self._upgraded - or self.exception() is not None - or self._payload_parser is not None - 
or len(self) > 0 - or bool(self._tail) - ) - - def force_close(self) -> None: - self._should_close = True - - def close(self) -> None: - transport = self.transport - if transport is not None: - transport.close() - self.transport = None - self._payload = None - self._drop_timeout() - - def is_connected(self) -> bool: - return self.transport is not None and not self.transport.is_closing() - - def connection_lost(self, exc: Optional[BaseException]) -> None: - self._drop_timeout() - - if self._payload_parser is not None: - with suppress(Exception): - self._payload_parser.feed_eof() - - uncompleted = None - if self._parser is not None: - try: - uncompleted = self._parser.feed_eof() - except Exception as e: - if self._payload is not None: - exc = ClientPayloadError("Response payload is not completed") - exc.__cause__ = e - self._payload.set_exception(exc) - - if not self.is_eof(): - if isinstance(exc, OSError): - exc = ClientOSError(*exc.args) - if exc is None: - exc = ServerDisconnectedError(uncompleted) - # assigns self._should_close to True as side effect, - # we do it anyway below - self.set_exception(exc) - - self._should_close = True - self._parser = None - self._payload = None - self._payload_parser = None - self._reading_paused = False - - super().connection_lost(exc) - - def eof_received(self) -> None: - # should call parser.feed_eof() most likely - self._drop_timeout() - - def pause_reading(self) -> None: - super().pause_reading() - self._drop_timeout() - - def resume_reading(self) -> None: - super().resume_reading() - self._reschedule_timeout() - - def set_exception(self, exc: BaseException) -> None: - self._should_close = True - self._drop_timeout() - super().set_exception(exc) - - def set_parser(self, parser: Any, payload: Any) -> None: - # TODO: actual types are: - # parser: WebSocketReader - # payload: FlowControlDataQueue - # but they are not generi enough - # Need an ABC for both types - self._payload = payload - self._payload_parser = parser - - self._drop_timeout() - - if self._tail: - data, self._tail = self._tail, b"" - self.data_received(data) - - def set_response_params( - self, - *, - timer: Optional[BaseTimerContext] = None, - skip_payload: bool = False, - read_until_eof: bool = False, - auto_decompress: bool = True, - read_timeout: Optional[float] = None, - read_bufsize: int = 2**16, - timeout_ceil_threshold: float = 5, - max_line_size: int = 8190, - max_field_size: int = 8190, - ) -> None: - self._skip_payload = skip_payload - - self._read_timeout = read_timeout - - self._timeout_ceil_threshold = timeout_ceil_threshold - - self._parser = HttpResponseParser( - self, - self._loop, - read_bufsize, - timer=timer, - payload_exception=ClientPayloadError, - response_with_body=not skip_payload, - read_until_eof=read_until_eof, - auto_decompress=auto_decompress, - max_line_size=max_line_size, - max_field_size=max_field_size, - ) - - if self._tail: - data, self._tail = self._tail, b"" - self.data_received(data) - - def _drop_timeout(self) -> None: - if self._read_timeout_handle is not None: - self._read_timeout_handle.cancel() - self._read_timeout_handle = None - - def _reschedule_timeout(self) -> None: - timeout = self._read_timeout - if self._read_timeout_handle is not None: - self._read_timeout_handle.cancel() - - if timeout: - self._read_timeout_handle = self._loop.call_later( - timeout, self._on_read_timeout - ) - else: - self._read_timeout_handle = None - - def start_timeout(self) -> None: - self._reschedule_timeout() - - def _on_read_timeout(self) -> None: - exc = 
ServerTimeoutError("Timeout on reading data from socket") - self.set_exception(exc) - if self._payload is not None: - self._payload.set_exception(exc) - - def data_received(self, data: bytes) -> None: - self._reschedule_timeout() - - if not data: - return - - # custom payload parser - if self._payload_parser is not None: - eof, tail = self._payload_parser.feed_data(data) - if eof: - self._payload = None - self._payload_parser = None - - if tail: - self.data_received(tail) - return - else: - if self._upgraded or self._parser is None: - # i.e. websocket connection, websocket parser is not set yet - self._tail += data - else: - # parse http messages - try: - messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as exc: - if self.transport is not None: - # connection.release() could be called BEFORE - # data_received(), the transport is already - # closed in this case - self.transport.close() - # should_close is True after the call - self.set_exception(exc) - return - - self._upgraded = upgraded - - payload: Optional[StreamReader] = None - for message, payload in messages: - if message.should_close: - self._should_close = True - - self._payload = payload - - if self._skip_payload or status_code_must_be_empty_body( - message.code - ): - self.feed_data((message, EMPTY_PAYLOAD), 0) - else: - self.feed_data((message, payload), 0) - if payload is not None: - # new message(s) was processed - # register timeout handler unsubscribing - # either on end-of-stream or immediately for - # EMPTY_PAYLOAD - if payload is not EMPTY_PAYLOAD: - payload.on_eof(self._drop_timeout) - else: - self._drop_timeout() - - if tail: - if upgraded: - self.data_received(tail) - else: - self._tail = tail diff --git a/.venv/Lib/site-packages/aiohttp/client_reqrep.py b/.venv/Lib/site-packages/aiohttp/client_reqrep.py deleted file mode 100644 index e0de951..0000000 --- a/.venv/Lib/site-packages/aiohttp/client_reqrep.py +++ /dev/null @@ -1,1197 +0,0 @@ -import asyncio -import codecs -import contextlib -import functools -import io -import re -import sys -import traceback -import warnings -from hashlib import md5, sha1, sha256 -from http.cookies import CookieError, Morsel, SimpleCookie -from types import MappingProxyType, TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - Mapping, - Optional, - Tuple, - Type, - Union, - cast, -) - -import attr -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy -from yarl import URL - -from . 
import hdrs, helpers, http, multipart, payload -from .abc import AbstractStreamWriter -from .client_exceptions import ( - ClientConnectionError, - ClientOSError, - ClientResponseError, - ContentTypeError, - InvalidURL, - ServerFingerprintMismatch, -) -from .compression_utils import HAS_BROTLI -from .formdata import FormData -from .helpers import ( - BaseTimerContext, - BasicAuth, - HeadersMixin, - TimerNoop, - basicauth_from_netrc, - netrc_from_env, - noop, - reify, - set_result, -) -from .http import ( - SERVER_SOFTWARE, - HttpVersion, - HttpVersion10, - HttpVersion11, - StreamWriter, -) -from .log import client_logger -from .streams import StreamReader -from .typedefs import ( - DEFAULT_JSON_DECODER, - JSONDecoder, - LooseCookies, - LooseHeaders, - RawHeaders, -) - -try: - import ssl - from ssl import SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - - -__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") - - -if TYPE_CHECKING: - from .client import ClientSession - from .connector import Connection - from .tracing import Trace - - -_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json") - - -def _gen_default_accept_encoding() -> str: - return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ContentDisposition: - type: Optional[str] - parameters: "MappingProxyType[str, str]" - filename: Optional[str] - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class RequestInfo: - url: URL - method: str - headers: "CIMultiDictProxy[str]" - real_url: URL = attr.ib() - - @real_url.default - def real_url_default(self) -> URL: - return self.url - - -class Fingerprint: - HASHFUNC_BY_DIGESTLEN = { - 16: md5, - 20: sha1, - 32: sha256, - } - - def __init__(self, fingerprint: bytes) -> None: - digestlen = len(fingerprint) - hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) - if not hashfunc: - raise ValueError("fingerprint has invalid length") - elif hashfunc is md5 or hashfunc is sha1: - raise ValueError( - "md5 and sha1 are insecure and " "not supported. Use sha256." 
- ) - self._hashfunc = hashfunc - self._fingerprint = fingerprint - - @property - def fingerprint(self) -> bytes: - return self._fingerprint - - def check(self, transport: asyncio.Transport) -> None: - if not transport.get_extra_info("sslcontext"): - return - sslobj = transport.get_extra_info("ssl_object") - cert = sslobj.getpeercert(binary_form=True) - got = self._hashfunc(cert).digest() - if got != self._fingerprint: - host, port, *_ = transport.get_extra_info("peername") - raise ServerFingerprintMismatch(self._fingerprint, got, host, port) - - -if ssl is not None: - SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) -else: # pragma: no cover - SSL_ALLOWED_TYPES = (bool, type(None)) - - -def _merge_ssl_params( - ssl: Union["SSLContext", bool, Fingerprint], - verify_ssl: Optional[bool], - ssl_context: Optional["SSLContext"], - fingerprint: Optional[bytes], -) -> Union["SSLContext", bool, Fingerprint]: - if ssl is None: - ssl = True # Double check for backwards compatibility - if verify_ssl is not None and not verify_ssl: - warnings.warn( - "verify_ssl is deprecated, use ssl=False instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not True: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = False - if ssl_context is not None: - warnings.warn( - "ssl_context is deprecated, use ssl=context instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not True: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = ssl_context - if fingerprint is not None: - warnings.warn( - "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", - DeprecationWarning, - stacklevel=3, - ) - if ssl is not True: - raise ValueError( - "verify_ssl, ssl_context, fingerprint and ssl " - "parameters are mutually exclusive" - ) - else: - ssl = Fingerprint(fingerprint) - if not isinstance(ssl, SSL_ALLOWED_TYPES): - raise TypeError( - "ssl should be SSLContext, bool, Fingerprint or None, " - "got {!r} instead.".format(ssl) - ) - return ssl - - -@attr.s(auto_attribs=True, slots=True, frozen=True) -class ConnectionKey: - # the key should contain an information about used proxy / TLS - # to prevent reusing wrong connections from a pool - host: str - port: Optional[int] - is_ssl: bool - ssl: Union[SSLContext, bool, Fingerprint] - proxy: Optional[URL] - proxy_auth: Optional[BasicAuth] - proxy_headers_hash: Optional[int] # hash(CIMultiDict) - - -def _is_expected_content_type( - response_content_type: str, expected_content_type: str -) -> bool: - if expected_content_type == "application/json": - return json_re.match(response_content_type) is not None - return expected_content_type in response_content_type - - -class ClientRequest: - GET_METHODS = { - hdrs.METH_GET, - hdrs.METH_HEAD, - hdrs.METH_OPTIONS, - hdrs.METH_TRACE, - } - POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} - ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - - DEFAULT_HEADERS = { - hdrs.ACCEPT: "*/*", - hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), - } - - body = b"" - auth = None - response = None - - __writer = None # async task for streaming data - _continue = None # waiter future for '100 Continue' response - - # N.B. - # Adding __del__ method with self._writer closing doesn't make sense - # because _writer is instance method, thus it keeps a reference to self. 
- # Until writer has finished finalizer will not be called. - - def __init__( - self, - method: str, - url: URL, - *, - params: Optional[Mapping[str, str]] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Iterable[str] = frozenset(), - data: Any = None, - cookies: Optional[LooseCookies] = None, - auth: Optional[BasicAuth] = None, - version: http.HttpVersion = http.HttpVersion11, - compress: Optional[str] = None, - chunked: Optional[bool] = None, - expect100: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - response_class: Optional[Type["ClientResponse"]] = None, - proxy: Optional[URL] = None, - proxy_auth: Optional[BasicAuth] = None, - timer: Optional[BaseTimerContext] = None, - session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - proxy_headers: Optional[LooseHeaders] = None, - traces: Optional[List["Trace"]] = None, - trust_env: bool = False, - server_hostname: Optional[str] = None, - ): - if loop is None: - loop = asyncio.get_event_loop() - - match = _CONTAINS_CONTROL_CHAR_RE.search(method) - if match: - raise ValueError( - f"Method cannot contain non-token characters {method!r} " - "(found at least {match.group()!r})" - ) - - assert isinstance(url, URL), url - assert isinstance(proxy, (URL, type(None))), proxy - # FIXME: session is None in tests only, need to fix tests - # assert session is not None - self._session = cast("ClientSession", session) - if params: - q = MultiDict(url.query) - url2 = url.with_query(params) - q.extend(url2.query) - url = url.with_query(q) - self.original_url = url - self.url = url.with_fragment(None) - self.method = method.upper() - self.chunked = chunked - self.compress = compress - self.loop = loop - self.length = None - if response_class is None: - real_response_class = ClientResponse - else: - real_response_class = response_class - self.response_class: Type[ClientResponse] = real_response_class - self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl if ssl is not None else True - self.server_hostname = server_hostname - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - self.update_version(version) - self.update_host(url) - self.update_headers(headers) - self.update_auto_headers(skip_auto_headers) - self.update_cookies(cookies) - self.update_content_encoding(data) - self.update_auth(auth, trust_env) - self.update_proxy(proxy, proxy_auth, proxy_headers) - - self.update_body_from_data(data) - if data is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() - self.update_expect_continue(expect100) - if traces is None: - traces = [] - self._traces = traces - - def __reset_writer(self, _: object = None) -> None: - self.__writer = None - - @property - def _writer(self) -> Optional["asyncio.Task[None]"]: - return self.__writer - - @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: - if self.__writer is not None: - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = writer - if writer is not None: - writer.add_done_callback(self.__reset_writer) - - def is_ssl(self) -> bool: - return self.url.scheme in ("https", "wss") - - @property - def ssl(self) -> Union["SSLContext", bool, Fingerprint]: - return self._ssl - - @property - def connection_key(self) -> ConnectionKey: - proxy_headers = self.proxy_headers - if proxy_headers: - h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items())) - else: - h = None - return 
ConnectionKey( - self.host, - self.port, - self.is_ssl(), - self.ssl, - self.proxy, - self.proxy_auth, - h, - ) - - @property - def host(self) -> str: - ret = self.url.raw_host - assert ret is not None - return ret - - @property - def port(self) -> Optional[int]: - return self.url.port - - @property - def request_info(self) -> RequestInfo: - headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) - return RequestInfo(self.url, self.method, headers, self.original_url) - - def update_host(self, url: URL) -> None: - """Update destination host, port and connection type (ssl).""" - # get host/port - if not url.raw_host: - raise InvalidURL(url) - - # basic auth info - username, password = url.user, url.password - if username: - self.auth = helpers.BasicAuth(username, password or "") - - def update_version(self, version: Union[http.HttpVersion, str]) -> None: - """Convert request version to two elements tuple. - - parser HTTP version '1.1' => (1, 1) - """ - if isinstance(version, str): - v = [part.strip() for part in version.split(".", 1)] - try: - version = http.HttpVersion(int(v[0]), int(v[1])) - except ValueError: - raise ValueError( - f"Can not parse http version number: {version}" - ) from None - self.version = version - - def update_headers(self, headers: Optional[LooseHeaders]) -> None: - """Update request headers.""" - self.headers: CIMultiDict[str] = CIMultiDict() - - # add host - netloc = cast(str, self.url.raw_host) - if helpers.is_ipv6_address(netloc): - netloc = f"[{netloc}]" - # See https://github.com/aio-libs/aiohttp/issues/3636. - netloc = netloc.rstrip(".") - if self.url.port is not None and not self.url.is_default_port(): - netloc += ":" + str(self.url.port) - self.headers[hdrs.HOST] = netloc - - if headers: - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() # type: ignore[assignment] - - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key.lower() == "host": - self.headers[key] = value - else: - self.headers.add(key, value) - - def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: - self.skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] - - for hdr, val in self.DEFAULT_HEADERS.items(): - if hdr not in used_headers: - self.headers.add(hdr, val) - - if hdrs.USER_AGENT not in used_headers: - self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE - - def update_cookies(self, cookies: Optional[LooseCookies]) -> None: - """Update request cookies header.""" - if not cookies: - return - - c = SimpleCookie() - if hdrs.COOKIE in self.headers: - c.load(self.headers.get(hdrs.COOKIE, "")) - del self.headers[hdrs.COOKIE] - - if isinstance(cookies, Mapping): - iter_cookies = cookies.items() - else: - iter_cookies = cookies # type: ignore[assignment] - for name, value in iter_cookies: - if isinstance(value, Morsel): - # Preserve coded_value - mrsl_val = value.get(value.key, Morsel()) - mrsl_val.set(value.key, value.value, value.coded_value) - c[name] = mrsl_val - else: - c[name] = value # type: ignore[assignment] - - self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() - - def update_content_encoding(self, data: Any) -> None: - """Set request content encoding.""" - if data is None: - return - - enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower() - if enc: - if self.compress: - raise ValueError( - "compress can not be set " "if 
Content-Encoding header is set" - ) - elif self.compress: - if not isinstance(self.compress, str): - self.compress = "deflate" - self.headers[hdrs.CONTENT_ENCODING] = self.compress - self.chunked = True # enable chunked, no need to deal with length - - def update_transfer_encoding(self) -> None: - """Analyze transfer-encoding header.""" - te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() - - if "chunked" in te: - if self.chunked: - raise ValueError( - "chunked can not be set " - 'if "Transfer-Encoding: chunked" header is set' - ) - - elif self.chunked: - if hdrs.CONTENT_LENGTH in self.headers: - raise ValueError( - "chunked can not be set " "if Content-Length header is set" - ) - - self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) - - def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: - """Set basic auth.""" - if auth is None: - auth = self.auth - if auth is None and trust_env and self.url.host is not None: - netrc_obj = netrc_from_env() - with contextlib.suppress(LookupError): - auth = basicauth_from_netrc(netrc_obj, self.url.host) - if auth is None: - return - - if not isinstance(auth, helpers.BasicAuth): - raise TypeError("BasicAuth() tuple is required instead") - - self.headers[hdrs.AUTHORIZATION] = auth.encode() - - def update_body_from_data(self, body: Any) -> None: - if body is None: - return - - # FormData - if isinstance(body, FormData): - body = body() - - try: - body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) - except payload.LookupError: - body = FormData(body)() - - self.body = body - - # enable chunked encoding if needed - if not self.chunked: - if hdrs.CONTENT_LENGTH not in self.headers: - size = body.size - if size is None: - self.chunked = True - else: - if hdrs.CONTENT_LENGTH not in self.headers: - self.headers[hdrs.CONTENT_LENGTH] = str(size) - - # copy payload headers - assert body.headers - for (key, value) in body.headers.items(): - if key in self.headers: - continue - if key in self.skip_auto_headers: - continue - self.headers[key] = value - - def update_expect_continue(self, expect: bool = False) -> None: - if expect: - self.headers[hdrs.EXPECT] = "100-continue" - elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue": - expect = True - - if expect: - self._continue = self.loop.create_future() - - def update_proxy( - self, - proxy: Optional[URL], - proxy_auth: Optional[BasicAuth], - proxy_headers: Optional[LooseHeaders], - ) -> None: - if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): - raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy = proxy - self.proxy_auth = proxy_auth - self.proxy_headers = proxy_headers - - def keep_alive(self) -> bool: - if self.version < HttpVersion10: - # keep alive not supported at all - return False - if self.version == HttpVersion10: - if self.headers.get(hdrs.CONNECTION) == "keep-alive": - return True - else: # no headers means we close for Http 1.0 - return False - elif self.headers.get(hdrs.CONNECTION) == "close": - return False - - return True - - async def write_bytes( - self, writer: AbstractStreamWriter, conn: "Connection" - ) -> None: - """Support coroutines that yields bytes objects.""" - # 100 response - if self._continue is not None: - try: - await writer.drain() - await self._continue - except asyncio.CancelledError: - return - - protocol = conn.protocol - assert protocol is not None - try: - if 
isinstance(self.body, payload.Payload): - await self.body.write(writer) - else: - if isinstance(self.body, (bytes, bytearray)): - self.body = (self.body,) # type: ignore[assignment] - - for chunk in self.body: - await writer.write(chunk) # type: ignore[arg-type] - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - protocol.set_exception(exc) - else: - new_exc = ClientOSError( - exc.errno, "Can not write request body for %s" % self.url - ) - new_exc.__context__ = exc - new_exc.__cause__ = exc - protocol.set_exception(new_exc) - except asyncio.CancelledError: - await writer.write_eof() - except Exception as exc: - protocol.set_exception(exc) - else: - await writer.write_eof() - protocol.start_timeout() - - async def send(self, conn: "Connection") -> "ClientResponse": - # Specify request target: - # - CONNECT request must send authority form URI - # - not CONNECT proxy must send absolute form URI - # - most common is origin form URI - if self.method == hdrs.METH_CONNECT: - connect_host = self.url.raw_host - assert connect_host is not None - if helpers.is_ipv6_address(connect_host): - connect_host = f"[{connect_host}]" - path = f"{connect_host}:{self.url.port}" - elif self.proxy and not self.is_ssl(): - path = str(self.url) - else: - path = self.url.raw_path - if self.url.raw_query_string: - path += "?" + self.url.raw_query_string - - protocol = conn.protocol - assert protocol is not None - writer = StreamWriter( - protocol, - self.loop, - on_chunk_sent=functools.partial( - self._on_chunk_request_sent, self.method, self.url - ), - on_headers_sent=functools.partial( - self._on_headers_request_sent, self.method, self.url - ), - ) - - if self.compress: - writer.enable_compression(self.compress) - - if self.chunked is not None: - writer.enable_chunking() - - # set default content-type - if ( - self.method in self.POST_METHODS - and hdrs.CONTENT_TYPE not in self.skip_auto_headers - and hdrs.CONTENT_TYPE not in self.headers - ): - self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - - # set the connection header - connection = self.headers.get(hdrs.CONNECTION) - if not connection: - if self.keep_alive(): - if self.version == HttpVersion10: - connection = "keep-alive" - else: - if self.version == HttpVersion11: - connection = "close" - - if connection is not None: - self.headers[hdrs.CONNECTION] = connection - - # status + headers - status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( - self.method, path, v=self.version - ) - await writer.write_headers(status_line, self.headers) - - self._writer = self.loop.create_task(self.write_bytes(writer, conn)) - - response_class = self.response_class - assert response_class is not None - self.response = response_class( - self.method, - self.original_url, - writer=self._writer, - continue100=self._continue, - timer=self._timer, - request_info=self.request_info, - traces=self._traces, - loop=self.loop, - session=self._session, - ) - return self.response - - async def close(self) -> None: - if self._writer is not None: - with contextlib.suppress(asyncio.CancelledError): - await self._writer - - def terminate(self) -> None: - if self._writer is not None: - if not self.loop.is_closed(): - self._writer.cancel() - self._writer.remove_done_callback(self.__reset_writer) - self._writer = None - - async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: - for trace in self._traces: - await trace.send_request_chunk_sent(method, url, chunk) - - async def _on_headers_request_sent( - self, 
method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - for trace in self._traces: - await trace.send_request_headers(method, url, headers) - - -class ClientResponse(HeadersMixin): - - # Some of these attributes are None when created, - # but will be set by the start() method. - # As the end user will likely never see the None values, we cheat the types below. - # from the Status-Line of the response - version: Optional[HttpVersion] = None # HTTP-Version - status: int = None # type: ignore[assignment] # Status-Code - reason: Optional[str] = None # Reason-Phrase - - content: StreamReader = None # type: ignore[assignment] # Payload stream - _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] - _raw_headers: RawHeaders = None # type: ignore[assignment] - - _connection = None # current connection - _source_traceback: Optional[traceback.StackSummary] = None - # set up by ClientRequest after ClientResponse object creation - # post-init stage allows to not change ctor signature - _closed = True # to allow __del__ for non-initialized properly response - _released = False - __writer = None - - def __init__( - self, - method: str, - url: URL, - *, - writer: "asyncio.Task[None]", - continue100: Optional["asyncio.Future[bool]"], - timer: BaseTimerContext, - request_info: RequestInfo, - traces: List["Trace"], - loop: asyncio.AbstractEventLoop, - session: "ClientSession", - ) -> None: - assert isinstance(url, URL) - - self.method = method - self.cookies = SimpleCookie() - - self._real_url = url - self._url = url.with_fragment(None) - self._body: Any = None - self._writer: Optional[asyncio.Task[None]] = writer - self._continue = continue100 # None by default - self._closed = True - self._history: Tuple[ClientResponse, ...] = () - self._request_info = request_info - self._timer = timer if timer is not None else TimerNoop() - self._cache: Dict[str, Any] = {} - self._traces = traces - self._loop = loop - # store a reference to session #1985 - self._session: Optional[ClientSession] = session - # Save reference to _resolve_charset, so that get_encoding() will still - # work after the response has finished reading the body. - if session is None: - # TODO: Fix session=None in tests (see ClientRequest.__init__). 
- self._resolve_charset: Callable[ - ["ClientResponse", bytes], str - ] = lambda *_: "utf-8" - else: - self._resolve_charset = session._resolve_charset - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - def __reset_writer(self, _: object = None) -> None: - self.__writer = None - - @property - def _writer(self) -> Optional["asyncio.Task[None]"]: - return self.__writer - - @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: - if self.__writer is not None: - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = writer - if writer is not None: - writer.add_done_callback(self.__reset_writer) - - @reify - def url(self) -> URL: - return self._url - - @reify - def url_obj(self) -> URL: - warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) - return self._url - - @reify - def real_url(self) -> URL: - return self._real_url - - @reify - def host(self) -> str: - assert self._url.host is not None - return self._url.host - - @reify - def headers(self) -> "CIMultiDictProxy[str]": - return self._headers - - @reify - def raw_headers(self) -> RawHeaders: - return self._raw_headers - - @reify - def request_info(self) -> RequestInfo: - return self._request_info - - @reify - def content_disposition(self) -> Optional[ContentDisposition]: - raw = self._headers.get(hdrs.CONTENT_DISPOSITION) - if raw is None: - return None - disposition_type, params_dct = multipart.parse_content_disposition(raw) - params = MappingProxyType(params_dct) - filename = multipart.content_disposition_filename(params) - return ContentDisposition(disposition_type, params, filename) - - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: - return - - if self._connection is not None: - self._connection.release() - self._cleanup_writer() - - if self._loop.get_debug(): - kwargs = {"source": self} - _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs) - context = {"client_response": self, "message": "Unclosed response"} - if self._source_traceback: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __repr__(self) -> str: - out = io.StringIO() - ascii_encodable_url = str(self.url) - if self.reason: - ascii_encodable_reason = self.reason.encode( - "ascii", "backslashreplace" - ).decode("ascii") - else: - ascii_encodable_reason = "None" - print( - "<ClientResponse({}) [{} {}]>".format( - ascii_encodable_url, self.status, ascii_encodable_reason - ), - file=out, - ) - print(self.headers, file=out) - return out.getvalue() - - @property - def connection(self) -> Optional["Connection"]: - return self._connection - - @reify - def history(self) -> Tuple["ClientResponse", ...]: - """A sequence of of responses, if redirects occurred.""" - return self._history - - @reify - def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": - links_str = ", ".join(self.headers.getall("link", [])) - - if not links_str: - return MultiDictProxy(MultiDict()) - - links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() - - for val in re.split(r",(?=\s*<)", links_str): - match = re.match(r"\s*<(.*)>(.*)", val) - if match is None: # pragma: no cover - # the check exists to suppress mypy error - continue - url, params_str = match.groups() - params = params_str.split(";")[1:] - - link: MultiDict[Union[str, URL]] = MultiDict() - - for param in params: - match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) - if match is None: # pragma: no cover - # the
check exists to suppress mypy error - continue - key, _, value, _ = match.groups() - - link.add(key, value) - - key = link.get("rel", url) - - link.add("url", self.url.join(URL(url))) - - links.add(str(key), MultiDictProxy(link)) - - return MultiDictProxy(links) - - async def start(self, connection: "Connection") -> "ClientResponse": - """Start response processing.""" - self._closed = False - self._protocol = connection.protocol - self._connection = connection - - with self._timer: - while True: - # read response - try: - protocol = self._protocol - message, payload = await protocol.read() # type: ignore[union-attr] - except http.HttpProcessingError as exc: - raise ClientResponseError( - self.request_info, - self.history, - status=exc.code, - message=exc.message, - headers=exc.headers, - ) from exc - - if message.code < 100 or message.code > 199 or message.code == 101: - break - - if self._continue is not None: - set_result(self._continue, True) - self._continue = None - - # payload eof handler - payload.on_eof(self._response_eof) - - # response status - self.version = message.version - self.status = message.code - self.reason = message.reason - - # headers - self._headers = message.headers # type is CIMultiDictProxy - self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] - - # payload - self.content = payload - - # cookies - for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): - try: - self.cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) - return self - - def _response_eof(self) -> None: - if self._closed: - return - - # protocol could be None because connection could be detached - protocol = self._connection and self._connection.protocol - if protocol is not None and protocol.upgraded: - return - - self._closed = True - self._cleanup_writer() - self._release_connection() - - @property - def closed(self) -> bool: - return self._closed - - def close(self) -> None: - if not self._released: - self._notify_content() - - self._closed = True - if self._loop is None or self._loop.is_closed(): - return - - self._cleanup_writer() - if self._connection is not None: - self._connection.close() - self._connection = None - - def release(self) -> Any: - if not self._released: - self._notify_content() - - self._closed = True - - self._cleanup_writer() - self._release_connection() - return noop() - - @property - def ok(self) -> bool: - """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. - - This is **not** a check for ``200 OK`` but a check that the response - status is under 400. 
- """ - return 400 > self.status - - def raise_for_status(self) -> None: - if not self.ok: - # reason should always be not None for a started response - assert self.reason is not None - self.release() - raise ClientResponseError( - self.request_info, - self.history, - status=self.status, - message=self.reason, - headers=self.headers, - ) - - def _release_connection(self) -> None: - if self._connection is not None: - if self._writer is None: - self._connection.release() - self._connection = None - else: - self._writer.add_done_callback(lambda f: self._release_connection()) - - async def _wait_released(self) -> None: - if self._writer is not None: - await self._writer - self._release_connection() - - def _cleanup_writer(self) -> None: - if self._writer is not None: - self._writer.cancel() - self._session = None - - def _notify_content(self) -> None: - content = self.content - if content and content.exception() is None: - content.set_exception(ClientConnectionError("Connection closed")) - self._released = True - - async def wait_for_close(self) -> None: - if self._writer is not None: - await self._writer - self.release() - - async def read(self) -> bytes: - """Read response payload.""" - if self._body is None: - try: - self._body = await self.content.read() - for trace in self._traces: - await trace.send_response_chunk_received( - self.method, self.url, self._body - ) - except BaseException: - self.close() - raise - elif self._released: # Response explicitly released - raise ClientConnectionError("Connection closed") - - protocol = self._connection and self._connection.protocol - if protocol is None or not protocol.upgraded: - await self._wait_released() # Underlying connection released - return self._body # type: ignore[no-any-return] - - def get_encoding(self) -> str: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - mimetype = helpers.parse_mimetype(ctype) - - encoding = mimetype.parameters.get("charset") - if encoding: - with contextlib.suppress(LookupError): - return codecs.lookup(encoding).name - - if mimetype.type == "application" and ( - mimetype.subtype == "json" or mimetype.subtype == "rdap" - ): - # RFC 7159 states that the default encoding is UTF-8. 
- # RFC 7483 defines application/rdap+json - return "utf-8" - - if self._body is None: - raise RuntimeError( - "Cannot compute fallback encoding of a not yet read body" - ) - - return self._resolve_charset(self, self._body) - - async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: - """Read response payload and decode.""" - if self._body is None: - await self.read() - - if encoding is None: - encoding = self.get_encoding() - - return self._body.decode( # type: ignore[no-any-return,union-attr] - encoding, errors=errors - ) - - async def json( - self, - *, - encoding: Optional[str] = None, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - content_type: Optional[str] = "application/json", - ) -> Any: - """Read and decodes JSON response.""" - if self._body is None: - await self.read() - - if content_type: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - if not _is_expected_content_type(ctype, content_type): - raise ContentTypeError( - self.request_info, - self.history, - message=( - "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype - ), - headers=self.headers, - ) - - stripped = self._body.strip() # type: ignore[union-attr] - if not stripped: - return None - - if encoding is None: - encoding = self.get_encoding() - - return loads(stripped.decode(encoding)) - - async def __aenter__(self) -> "ClientResponse": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - # similar to _RequestContextManager, we do not need to check - # for exceptions, response object can close connection - # if state is broken - self.release() - await self.wait_for_close() diff --git a/.venv/Lib/site-packages/aiohttp/client_ws.py b/.venv/Lib/site-packages/aiohttp/client_ws.py deleted file mode 100644 index d9c74a3..0000000 --- a/.venv/Lib/site-packages/aiohttp/client_ws.py +++ /dev/null @@ -1,315 +0,0 @@ -"""WebSocket client for asyncio.""" - -import asyncio -import sys -from typing import Any, Optional, cast - -from .client_exceptions import ClientError -from .client_reqrep import ClientResponse -from .helpers import call_later, set_result -from .http import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WebSocketError, - WSCloseCode, - WSMessage, - WSMsgType, -) -from .http_websocket import WebSocketWriter # WSMessage -from .streams import EofStream, FlowControlDataQueue -from .typedefs import ( - DEFAULT_JSON_DECODER, - DEFAULT_JSON_ENCODER, - JSONDecoder, - JSONEncoder, -) - -if sys.version_info >= (3, 11): - import asyncio as async_timeout -else: - import async_timeout - - -class ClientWebSocketResponse: - def __init__( - self, - reader: "FlowControlDataQueue[WSMessage]", - writer: WebSocketWriter, - protocol: Optional[str], - response: ClientResponse, - timeout: float, - autoclose: bool, - autoping: bool, - loop: asyncio.AbstractEventLoop, - *, - receive_timeout: Optional[float] = None, - heartbeat: Optional[float] = None, - compress: int = 0, - client_notakeover: bool = False, - ) -> None: - self._response = response - self._conn = response.connection - - self._writer = writer - self._reader = reader - self._protocol = protocol - self._closed = False - self._closing = False - self._close_code: Optional[int] = None - self._timeout = timeout - self._receive_timeout = receive_timeout - self._autoclose = autoclose - self._autoping = autoping - self._heartbeat = heartbeat - self._heartbeat_cb: Optional[asyncio.TimerHandle] = None - if heartbeat is 
not None: - self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb: Optional[asyncio.TimerHandle] = None - self._loop = loop - self._waiting: Optional[asyncio.Future[bool]] = None - self._exception: Optional[BaseException] = None - self._compress = compress - self._client_notakeover = client_notakeover - - self._reset_heartbeat() - - def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = None - - if self._heartbeat_cb is not None: - self._heartbeat_cb.cancel() - self._heartbeat_cb = None - - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() - - if self._heartbeat is not None: - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5, - ) - - def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold - if self._conn is not None - else 5, - ) - - def _pong_not_received(self) -> None: - if not self._closed: - self._closed = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = asyncio.TimeoutError() - self._response.close() - - @property - def closed(self) -> bool: - return self._closed - - @property - def close_code(self) -> Optional[int]: - return self._close_code - - @property - def protocol(self) -> Optional[str]: - return self._protocol - - @property - def compress(self) -> int: - return self._compress - - @property - def client_notakeover(self) -> bool: - return self._client_notakeover - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """extra info from connection transport""" - conn = self._response.connection - if conn is None: - return default - transport = conn.transport - if transport is None: - return default - return transport.get_extra_info(name, default) - - def exception(self) -> Optional[BaseException]: - return self._exception - - async def ping(self, message: bytes = b"") -> None: - await self._writer.ping(message) - - async def pong(self, message: bytes = b"") -> None: - await self._writer.pong(message) - - async def send_str(self, data: str, compress: Optional[int] = None) -> None: - if not isinstance(data, str): - raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send(data, binary=False, compress=compress) - - async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: - if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send(data, binary=True, compress=compress) - - async def send_json( - self, - data: Any, - compress: Optional[int] = None, - *, - dumps: JSONEncoder = DEFAULT_JSON_ENCODER, - ) -> None: - await self.send_str(dumps(data), compress=compress) - - async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting is not None and not 
self._closing: - self._closing = True - self._reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting - - if not self._closed: - self._cancel_heartbeat() - self._closed = True - try: - await self._writer.close(code, message) - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._response.close() - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - return True - - if self._close_code: - self._response.close() - return True - - while True: - try: - async with async_timeout.timeout(self._timeout): - msg = await self._reader.read() - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._response.close() - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - self._response.close() - return True - - if msg.type == WSMsgType.CLOSE: - self._close_code = msg.data - self._response.close() - return True - else: - return False - - async def receive(self, timeout: Optional[float] = None) -> WSMessage: - while True: - if self._waiting is not None: - raise RuntimeError("Concurrent call to receive() is not allowed") - - if self._closed: - return WS_CLOSED_MESSAGE - elif self._closing: - await self.close() - return WS_CLOSED_MESSAGE - - try: - self._waiting = self._loop.create_future() - try: - async with async_timeout.timeout(timeout or self._receive_timeout): - msg = await self._reader.read() - self._reset_heartbeat() - finally: - waiter = self._waiting - self._waiting = None - set_result(waiter, True) - except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - raise - except EofStream: - self._close_code = WSCloseCode.OK - await self.close() - return WSMessage(WSMsgType.CLOSED, None, None) - except ClientError: - self._closed = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - return WS_CLOSED_MESSAGE - except WebSocketError as exc: - self._close_code = exc.code - await self.close(code=exc.code) - return WSMessage(WSMsgType.ERROR, exc, None) - except Exception as exc: - self._exception = exc - self._closing = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - await self.close() - return WSMessage(WSMsgType.ERROR, exc, None) - - if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data - if not self._closed and self._autoclose: - await self.close() - elif msg.type == WSMsgType.CLOSING: - self._closing = True - elif msg.type == WSMsgType.PING and self._autoping: - await self.pong(msg.data) - continue - elif msg.type == WSMsgType.PONG and self._autoping: - continue - - return msg - - async def receive_str(self, *, timeout: Optional[float] = None) -> str: - msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str") - return cast(str, msg.data) - - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: - msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") - return cast(bytes, msg.data) - - async def receive_json( - self, - *, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - timeout: Optional[float] = None, - ) -> Any: - data = await self.receive_str(timeout=timeout) - return loads(data) - - def __aiter__(self) -> "ClientWebSocketResponse": - return self - - async def __anext__(self) -> WSMessage: - msg = await 
self.receive() - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): - raise StopAsyncIteration - return msg diff --git a/.venv/Lib/site-packages/aiohttp/compression_utils.py b/.venv/Lib/site-packages/aiohttp/compression_utils.py deleted file mode 100644 index 9631d37..0000000 --- a/.venv/Lib/site-packages/aiohttp/compression_utils.py +++ /dev/null @@ -1,157 +0,0 @@ -import asyncio -import zlib -from concurrent.futures import Executor -from typing import Optional, cast - -try: - try: - import brotlicffi as brotli - except ImportError: - import brotli - - HAS_BROTLI = True -except ImportError: # pragma: no cover - HAS_BROTLI = False - -MAX_SYNC_CHUNK_SIZE = 1024 - - -def encoding_to_mode( - encoding: Optional[str] = None, - suppress_deflate_header: bool = False, -) -> int: - if encoding == "gzip": - return 16 + zlib.MAX_WBITS - - return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS - - -class ZlibBaseHandler: - def __init__( - self, - mode: int, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ): - self._mode = mode - self._executor = executor - self._max_sync_chunk_size = max_sync_chunk_size - - -class ZLibCompressor(ZlibBaseHandler): - def __init__( - self, - encoding: Optional[str] = None, - suppress_deflate_header: bool = False, - level: Optional[int] = None, - wbits: Optional[int] = None, - strategy: int = zlib.Z_DEFAULT_STRATEGY, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ): - super().__init__( - mode=encoding_to_mode(encoding, suppress_deflate_header) - if wbits is None - else wbits, - executor=executor, - max_sync_chunk_size=max_sync_chunk_size, - ) - if level is None: - self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy) - else: - self._compressor = zlib.compressobj( - wbits=self._mode, strategy=strategy, level=level - ) - self._compress_lock = asyncio.Lock() - - def compress_sync(self, data: bytes) -> bytes: - return self._compressor.compress(data) - - async def compress(self, data: bytes) -> bytes: - async with self._compress_lock: - # To ensure the stream is consistent in the event - # there are multiple writers, we need to lock - # the compressor so that only one writer can - # compress at a time. 
- if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.compress_sync, data - ) - return self.compress_sync(data) - - def flush(self, mode: int = zlib.Z_FINISH) -> bytes: - return self._compressor.flush(mode) - - -class ZLibDecompressor(ZlibBaseHandler): - def __init__( - self, - encoding: Optional[str] = None, - suppress_deflate_header: bool = False, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, - ): - super().__init__( - mode=encoding_to_mode(encoding, suppress_deflate_header), - executor=executor, - max_sync_chunk_size=max_sync_chunk_size, - ) - self._decompressor = zlib.decompressobj(wbits=self._mode) - - def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: - return self._decompressor.decompress(data, max_length) - - async def decompress(self, data: bytes, max_length: int = 0) -> bytes: - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.decompress_sync, data, max_length - ) - return self.decompress_sync(data, max_length) - - def flush(self, length: int = 0) -> bytes: - return ( - self._decompressor.flush(length) - if length > 0 - else self._decompressor.flush() - ) - - @property - def eof(self) -> bool: - return self._decompressor.eof - - @property - def unconsumed_tail(self) -> bytes: - return self._decompressor.unconsumed_tail - - @property - def unused_data(self) -> bytes: - return self._decompressor.unused_data - - -class BrotliDecompressor: - # Supports both 'brotlipy' and 'Brotli' packages - # since they share an import name. The top branches - # are for 'brotlipy' and bottom branches for 'Brotli' - def __init__(self) -> None: - if not HAS_BROTLI: - raise RuntimeError( - "The brotli decompression is not available. " - "Please install `Brotli` module" - ) - self._obj = brotli.Decompressor() - - def decompress_sync(self, data: bytes) -> bytes: - if hasattr(self._obj, "decompress"): - return cast(bytes, self._obj.decompress(data)) - return cast(bytes, self._obj.process(data)) - - def flush(self) -> bytes: - if hasattr(self._obj, "flush"): - return cast(bytes, self._obj.flush()) - return b"" diff --git a/.venv/Lib/site-packages/aiohttp/connector.py b/.venv/Lib/site-packages/aiohttp/connector.py deleted file mode 100644 index 3b9841d..0000000 --- a/.venv/Lib/site-packages/aiohttp/connector.py +++ /dev/null @@ -1,1489 +0,0 @@ -import asyncio -import functools -import random -import sys -import traceback -import warnings -from collections import defaultdict, deque -from contextlib import suppress -from http import HTTPStatus -from http.cookies import SimpleCookie -from itertools import cycle, islice -from time import monotonic -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - DefaultDict, - Dict, - Iterator, - List, - Literal, - Optional, - Set, - Tuple, - Type, - Union, - cast, -) - -import attr - -from . 
import hdrs, helpers -from .abc import AbstractResolver -from .client_exceptions import ( - ClientConnectionError, - ClientConnectorCertificateError, - ClientConnectorError, - ClientConnectorSSLError, - ClientHttpProxyError, - ClientProxyConnectionError, - ServerFingerprintMismatch, - UnixClientConnectorError, - cert_errors, - ssl_errors, -) -from .client_proto import ResponseHandler -from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params -from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel -from .locks import EventResultOrError -from .resolver import DefaultResolver - -try: - import ssl - - SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - - -__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") - - -if TYPE_CHECKING: - from .client import ClientTimeout - from .client_reqrep import ConnectionKey - from .tracing import Trace - - -class _DeprecationWaiter: - __slots__ = ("_awaitable", "_awaited") - - def __init__(self, awaitable: Awaitable[Any]) -> None: - self._awaitable = awaitable - self._awaited = False - - def __await__(self) -> Any: - self._awaited = True - return self._awaitable.__await__() - - def __del__(self) -> None: - if not self._awaited: - warnings.warn( - "Connector.close() is a coroutine, " - "please use await connector.close()", - DeprecationWarning, - ) - - -class Connection: - - _source_traceback = None - _transport = None - - def __init__( - self, - connector: "BaseConnector", - key: "ConnectionKey", - protocol: ResponseHandler, - loop: asyncio.AbstractEventLoop, - ) -> None: - self._key = key - self._connector = connector - self._loop = loop - self._protocol: Optional[ResponseHandler] = protocol - self._callbacks: List[Callable[[], None]] = [] - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - def __repr__(self) -> str: - return f"Connection<{self._key}>" - - def __del__(self, _warnings: Any = warnings) -> None: - if self._protocol is not None: - kwargs = {"source": self} - _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) - if self._loop.is_closed(): - return - - self._connector._release(self._key, self._protocol, should_close=True) - - context = {"client_connection": self, "message": "Unclosed connection"} - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __bool__(self) -> Literal[True]: - """Force subclasses to not be falsy, to make checks simpler.""" - return True - - @property - def loop(self) -> asyncio.AbstractEventLoop: - warnings.warn( - "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - @property - def transport(self) -> Optional[asyncio.Transport]: - if self._protocol is None: - return None - return self._protocol.transport - - @property - def protocol(self) -> Optional[ResponseHandler]: - return self._protocol - - def add_callback(self, callback: Callable[[], None]) -> None: - if callback is not None: - self._callbacks.append(callback) - - def _notify_release(self) -> None: - callbacks, self._callbacks = self._callbacks[:], [] - - for cb in callbacks: - with suppress(Exception): - cb() - - def close(self) -> None: - self._notify_release() - - if self._protocol is not None: - self._connector._release(self._key, self._protocol, should_close=True) - 
self._protocol = None - - def release(self) -> None: - self._notify_release() - - if self._protocol is not None: - self._connector._release( - self._key, self._protocol, should_close=self._protocol.should_close - ) - self._protocol = None - - @property - def closed(self) -> bool: - return self._protocol is None or not self._protocol.is_connected() - - -class _TransportPlaceholder: - """placeholder for BaseConnector.connect function""" - - def close(self) -> None: - pass - - -class BaseConnector: - """Base connector class. - - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - enable_cleanup_closed - Enables clean-up closed ssl transports. - Disabled by default. - timeout_ceil_threshold - Trigger ceiling of timeout values when - it's above timeout_ceil_threshold. - loop - Optional event loop. - """ - - _closed = True # prevent AttributeError in __del__ if ctor was failed - _source_traceback = None - - # abort transport after 2 seconds (cleanup broken connections) - _cleanup_closed_period = 2.0 - - def __init__( - self, - *, - keepalive_timeout: Union[object, None, float] = sentinel, - force_close: bool = False, - limit: int = 100, - limit_per_host: int = 0, - enable_cleanup_closed: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - timeout_ceil_threshold: float = 5, - ) -> None: - - if force_close: - if keepalive_timeout is not None and keepalive_timeout is not sentinel: - raise ValueError( - "keepalive_timeout cannot " "be set if force_close is True" - ) - else: - if keepalive_timeout is sentinel: - keepalive_timeout = 15.0 - - loop = get_running_loop(loop) - self._timeout_ceil_threshold = timeout_ceil_threshold - - self._closed = False - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {} - self._limit = limit - self._limit_per_host = limit_per_host - self._acquired: Set[ResponseHandler] = set() - self._acquired_per_host: DefaultDict[ - ConnectionKey, Set[ResponseHandler] - ] = defaultdict(set) - self._keepalive_timeout = cast(float, keepalive_timeout) - self._force_close = force_close - - # {host_key: FIFO list of waiters} - self._waiters = defaultdict(deque) # type: ignore[var-annotated] - - self._loop = loop - self._factory = functools.partial(ResponseHandler, loop=loop) - - self.cookies = SimpleCookie() - - # start keep-alive connection cleanup task - self._cleanup_handle: Optional[asyncio.TimerHandle] = None - - # start cleanup closed transports task - self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None - self._cleanup_closed_disabled = not enable_cleanup_closed - self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] - self._cleanup_closed() - - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: - return - if not self._conns: - return - - conns = [repr(c) for c in self._conns.values()] - - self._close() - - kwargs = {"source": self} - _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) - context = { - "connector": self, - "connections": conns, - "message": "Unclosed connector", - } - if self._source_traceback is not None: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) - - def __enter__(self) -> 
"BaseConnector": - warnings.warn( - '"with Connector():" is deprecated, ' - 'use "async with Connector():" instead', - DeprecationWarning, - ) - return self - - def __exit__(self, *exc: Any) -> None: - self._close() - - async def __aenter__(self) -> "BaseConnector": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - exc_traceback: Optional[TracebackType] = None, - ) -> None: - await self.close() - - @property - def force_close(self) -> bool: - """Ultimately close connection on releasing if True.""" - return self._force_close - - @property - def limit(self) -> int: - """The total number for simultaneous connections. - - If limit is 0 the connector has no limit. - The default limit size is 100. - """ - return self._limit - - @property - def limit_per_host(self) -> int: - """The limit for simultaneous connections to the same endpoint. - - Endpoints are the same if they are have equal - (host, port, is_ssl) triple. - """ - return self._limit_per_host - - def _cleanup(self) -> None: - """Cleanup unused transports.""" - if self._cleanup_handle: - self._cleanup_handle.cancel() - # _cleanup_handle should be unset, otherwise _release() will not - # recreate it ever! - self._cleanup_handle = None - - now = self._loop.time() - timeout = self._keepalive_timeout - - if self._conns: - connections = {} - deadline = now - timeout - for key, conns in self._conns.items(): - alive = [] - for proto, use_time in conns: - if proto.is_connected(): - if use_time - deadline < 0: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - alive.append((proto, use_time)) - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - - if alive: - connections[key] = alive - - self._conns = connections - - if self._conns: - self._cleanup_handle = helpers.weakref_handle( - self, - "_cleanup", - timeout, - self._loop, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) - - def _drop_acquired_per_host( - self, key: "ConnectionKey", val: ResponseHandler - ) -> None: - acquired_per_host = self._acquired_per_host - if key not in acquired_per_host: - return - conns = acquired_per_host[key] - conns.remove(val) - if not conns: - del self._acquired_per_host[key] - - def _cleanup_closed(self) -> None: - """Double confirmation for transport close. - - Some broken ssl servers may leave socket open without proper close. 
- """ - if self._cleanup_closed_handle: - self._cleanup_closed_handle.cancel() - - for transport in self._cleanup_closed_transports: - if transport is not None: - transport.abort() - - self._cleanup_closed_transports = [] - - if not self._cleanup_closed_disabled: - self._cleanup_closed_handle = helpers.weakref_handle( - self, - "_cleanup_closed", - self._cleanup_closed_period, - self._loop, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) - - def close(self) -> Awaitable[None]: - """Close all opened transports.""" - self._close() - return _DeprecationWaiter(noop()) - - def _close(self) -> None: - if self._closed: - return - - self._closed = True - - try: - if self._loop.is_closed(): - return - - # cancel cleanup task - if self._cleanup_handle: - self._cleanup_handle.cancel() - - # cancel cleanup close task - if self._cleanup_closed_handle: - self._cleanup_closed_handle.cancel() - - for data in self._conns.values(): - for proto, t0 in data: - proto.close() - - for proto in self._acquired: - proto.close() - - for transport in self._cleanup_closed_transports: - if transport is not None: - transport.abort() - - finally: - self._conns.clear() - self._acquired.clear() - self._waiters.clear() - self._cleanup_handle = None - self._cleanup_closed_transports.clear() - self._cleanup_closed_handle = None - - @property - def closed(self) -> bool: - """Is connector closed. - - A readonly property. - """ - return self._closed - - def _available_connections(self, key: "ConnectionKey") -> int: - """ - Return number of available connections. - - The limit, limit_per_host and the connection key are taken into account. - - If it returns less than 1 means that there are no connections - available. - """ - if self._limit: - # total calc available connections - available = self._limit - len(self._acquired) - - # check limit per host - if ( - self._limit_per_host - and available > 0 - and key in self._acquired_per_host - ): - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - - elif self._limit_per_host and key in self._acquired_per_host: - # check limit per host - acquired = self._acquired_per_host.get(key) - assert acquired is not None - available = self._limit_per_host - len(acquired) - else: - available = 1 - - return available - - async def connect( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> Connection: - """Get from pool or create new connection.""" - key = req.connection_key - available = self._available_connections(key) - - # Wait if there are no available connections or if there are/were - # waiters (i.e. don't steal connection from a waiter about to wake up) - if available <= 0 or key in self._waiters: - fut = self._loop.create_future() - - # This connection will now count towards the limit. 
- self._waiters[key].append(fut) - - if traces: - for trace in traces: - await trace.send_connection_queued_start() - - try: - await fut - except BaseException as e: - if key in self._waiters: - # remove a waiter even if it was cancelled, normally it's - # removed when it's notified - try: - self._waiters[key].remove(fut) - except ValueError: # fut may no longer be in list - pass - - raise e - finally: - if key in self._waiters and not self._waiters[key]: - del self._waiters[key] - - if traces: - for trace in traces: - await trace.send_connection_queued_end() - - proto = self._get(key) - if proto is None: - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - - if traces: - for trace in traces: - await trace.send_connection_create_start() - - try: - proto = await self._create_connection(req, traces, timeout) - if self._closed: - proto.close() - raise ClientConnectionError("Connector is closed.") - except BaseException: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - self._release_waiter() - raise - else: - if not self._closed: - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - - if traces: - for trace in traces: - await trace.send_connection_create_end() - else: - if traces: - # Acquire the connection to prevent race conditions with limits - placeholder = cast(ResponseHandler, _TransportPlaceholder()) - self._acquired.add(placeholder) - self._acquired_per_host[key].add(placeholder) - for trace in traces: - await trace.send_connection_reuseconn() - self._acquired.remove(placeholder) - self._drop_acquired_per_host(key, placeholder) - - self._acquired.add(proto) - self._acquired_per_host[key].add(proto) - return Connection(self, key, proto, self._loop) - - def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]: - try: - conns = self._conns[key] - except KeyError: - return None - - t1 = self._loop.time() - while conns: - proto, t0 = conns.pop() - if proto.is_connected(): - if t1 - t0 > self._keepalive_timeout: - transport = proto.transport - proto.close() - # only for SSL transports - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - if not conns: - # The very last connection was reclaimed: drop the key - del self._conns[key] - return proto - else: - transport = proto.transport - proto.close() - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - - # No more connections: drop the key - del self._conns[key] - return None - - def _release_waiter(self) -> None: - """ - Iterates over all waiters until one to be released is found. - - The one to be released is not finished and - belongs to a host that has available connections. - """ - if not self._waiters: - return - - # Having the dict keys ordered this avoids to iterate - # at the same order at each call. 
- queues = list(self._waiters.keys()) - random.shuffle(queues) - - for key in queues: - if self._available_connections(key) < 1: - continue - - waiters = self._waiters[key] - while waiters: - waiter = waiters.popleft() - if not waiter.done(): - waiter.set_result(None) - return - - def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: - if self._closed: - # acquired connection is already released on connector closing - return - - try: - self._acquired.remove(proto) - self._drop_acquired_per_host(key, proto) - except KeyError: # pragma: no cover - # this may be result of undetermenistic order of objects - # finalization due garbage collection. - pass - else: - self._release_waiter() - - def _release( - self, - key: "ConnectionKey", - protocol: ResponseHandler, - *, - should_close: bool = False, - ) -> None: - if self._closed: - # acquired connection is already released on connector closing - return - - self._release_acquired(key, protocol) - - if self._force_close: - should_close = True - - if should_close or protocol.should_close: - transport = protocol.transport - protocol.close() - - if key.is_ssl and not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transport) - else: - conns = self._conns.get(key) - if conns is None: - conns = self._conns[key] = [] - conns.append((protocol, self._loop.time())) - - if self._cleanup_handle is None: - self._cleanup_handle = helpers.weakref_handle( - self, - "_cleanup", - self._keepalive_timeout, - self._loop, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) - - async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - raise NotImplementedError() - - -class _DNSCacheTable: - def __init__(self, ttl: Optional[float] = None) -> None: - self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} - self._timestamps: Dict[Tuple[str, int], float] = {} - self._ttl = ttl - - def __contains__(self, host: object) -> bool: - return host in self._addrs_rr - - def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: - self._addrs_rr[key] = (cycle(addrs), len(addrs)) - - if self._ttl is not None: - self._timestamps[key] = monotonic() - - def remove(self, key: Tuple[str, int]) -> None: - self._addrs_rr.pop(key, None) - - if self._ttl is not None: - self._timestamps.pop(key, None) - - def clear(self) -> None: - self._addrs_rr.clear() - self._timestamps.clear() - - def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: - loop, length = self._addrs_rr[key] - addrs = list(islice(loop, length)) - # Consume one more element to shift internal state of `cycle` - next(loop) - return addrs - - def expired(self, key: Tuple[str, int]) -> bool: - if self._ttl is None: - return False - - return self._timestamps[key] + self._ttl < monotonic() - - -class TCPConnector(BaseConnector): - """TCP connector. - - verify_ssl - Set to True to check ssl certifications. - fingerprint - Pass the binary sha256 - digest of the expected certificate in DER format to verify - that the certificate the server presents matches. See also - https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning - resolver - Enable DNS lookups and use this - resolver - use_dns_cache - Use memory cache for DNS lookups. - ttl_dns_cache - Max seconds having cached a DNS entry, None forever. 
- family - socket address family - local_addr - local tuple of (host, port) to bind socket to - - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - enable_cleanup_closed - Enables clean-up closed ssl transports. - Disabled by default. - loop - Optional event loop. - """ - - def __init__( - self, - *, - verify_ssl: bool = True, - fingerprint: Optional[bytes] = None, - use_dns_cache: bool = True, - ttl_dns_cache: Optional[int] = 10, - family: int = 0, - ssl_context: Optional[SSLContext] = None, - ssl: Union[bool, Fingerprint, SSLContext] = True, - local_addr: Optional[Tuple[str, int]] = None, - resolver: Optional[AbstractResolver] = None, - keepalive_timeout: Union[None, float, object] = sentinel, - force_close: bool = False, - limit: int = 100, - limit_per_host: int = 0, - enable_cleanup_closed: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - timeout_ceil_threshold: float = 5, - ): - super().__init__( - keepalive_timeout=keepalive_timeout, - force_close=force_close, - limit=limit, - limit_per_host=limit_per_host, - enable_cleanup_closed=enable_cleanup_closed, - loop=loop, - timeout_ceil_threshold=timeout_ceil_threshold, - ) - - self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) - if resolver is None: - resolver = DefaultResolver(loop=self._loop) - self._resolver = resolver - - self._use_dns_cache = use_dns_cache - self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) - self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {} - self._family = family - self._local_addr = local_addr - - def close(self) -> Awaitable[None]: - """Close all ongoing DNS calls.""" - for ev in self._throttle_dns_events.values(): - ev.cancel() - - return super().close() - - @property - def family(self) -> int: - """Socket family like AF_INET.""" - return self._family - - @property - def use_dns_cache(self) -> bool: - """True if local DNS caching is enabled.""" - return self._use_dns_cache - - def clear_dns_cache( - self, host: Optional[str] = None, port: Optional[int] = None - ) -> None: - """Remove specified host/port or clear all dns local cache.""" - if host is not None and port is not None: - self._cached_hosts.remove((host, port)) - elif host is not None or port is not None: - raise ValueError("either both host and port " "or none of them are allowed") - else: - self._cached_hosts.clear() - - async def _resolve_host( - self, host: str, port: int, traces: Optional[List["Trace"]] = None - ) -> List[Dict[str, Any]]: - if is_ip_address(host): - return [ - { - "hostname": host, - "host": host, - "port": port, - "family": self._family, - "proto": 0, - "flags": 0, - } - ] - - if not self._use_dns_cache: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - res = await self._resolver.resolve(host, port, family=self._family) - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_end(host) - - return res - - key = (host, port) - - if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): - # get result early, before any await (#4014) - result = self._cached_hosts.next_addrs(key) - - if traces: - for trace in traces: - await trace.send_dns_cache_hit(host) - return result - - if key in self._throttle_dns_events: - # get event early, before any await (#4014) - event = 
self._throttle_dns_events[key] - if traces: - for trace in traces: - await trace.send_dns_cache_hit(host) - await event.wait() - else: - # update dict early, before any await (#4014) - self._throttle_dns_events[key] = EventResultOrError(self._loop) - if traces: - for trace in traces: - await trace.send_dns_cache_miss(host) - try: - - if traces: - for trace in traces: - await trace.send_dns_resolvehost_start(host) - - addrs = await self._resolver.resolve(host, port, family=self._family) - if traces: - for trace in traces: - await trace.send_dns_resolvehost_end(host) - - self._cached_hosts.add(key, addrs) - self._throttle_dns_events[key].set() - except BaseException as e: - # any DNS exception, independently of the implementation - # is set for the waiters to raise the same exception. - self._throttle_dns_events[key].set(exc=e) - raise - finally: - self._throttle_dns_events.pop(key) - - return self._cached_hosts.next_addrs(key) - - async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - """Create connection. - - Has same keyword arguments as BaseEventLoop.create_connection. - """ - if req.proxy: - _, proto = await self._create_proxy_connection(req, traces, timeout) - else: - _, proto = await self._create_direct_connection(req, traces, timeout) - - return proto - - @staticmethod - @functools.lru_cache(None) - def _make_ssl_context(verified: bool) -> SSLContext: - if verified: - return ssl.create_default_context() - else: - sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.options |= ssl.OP_NO_SSLv3 - sslcontext.check_hostname = False - sslcontext.verify_mode = ssl.CERT_NONE - try: - sslcontext.options |= ssl.OP_NO_COMPRESSION - except AttributeError as attr_err: - warnings.warn( - "{!s}: The Python interpreter is compiled " - "against OpenSSL < 1.0.0. Ref: " - "https://docs.python.org/3/library/ssl.html" - "#ssl.OP_NO_COMPRESSION".format(attr_err), - ) - sslcontext.set_default_verify_paths() - return sslcontext - - def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: - """Logic to get the correct SSL context - - 0. if req.ssl is false, return None - - 1. if ssl_context is specified in req, use it - 2. if _ssl_context is specified in self, use it - 3. otherwise: - 1. if verify_ssl is not specified in req, use self.ssl_context - (will generate a default context according to self.verify_ssl) - 2. if verify_ssl is True in req, generate a default SSL context - 3. 
if verify_ssl is False in req, generate a SSL context that - won't verify - """ - if req.is_ssl(): - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - sslcontext = req.ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - sslcontext = self._ssl - if isinstance(sslcontext, ssl.SSLContext): - return sslcontext - if sslcontext is not True: - # not verified or fingerprinted - return self._make_ssl_context(False) - return self._make_ssl_context(True) - else: - return None - - def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: - ret = req.ssl - if isinstance(ret, Fingerprint): - return ret - ret = self._ssl - if isinstance(ret, Fingerprint): - return ret - return None - - async def _wrap_create_connection( - self, - *args: Any, - req: ClientRequest, - timeout: "ClientTimeout", - client_error: Type[Exception] = ClientConnectorError, - **kwargs: Any, - ) -> Tuple[asyncio.Transport, ResponseHandler]: - try: - async with ceil_timeout( - timeout.sock_connect, ceil_threshold=timeout.ceil_threshold - ): - return await self._loop.create_connection(*args, **kwargs) - except cert_errors as exc: - raise ClientConnectorCertificateError(req.connection_key, exc) from exc - except ssl_errors as exc: - raise ClientConnectorSSLError(req.connection_key, exc) from exc - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - raise client_error(req.connection_key, exc) from exc - - def _fail_on_no_start_tls(self, req: "ClientRequest") -> None: - """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``. - - It is necessary for TLS-in-TLS so that it is possible to - send HTTPS queries through HTTPS proxies. - - This doesn't affect regular HTTP requests, though. - """ - if not req.is_ssl(): - return - - proxy_url = req.proxy - assert proxy_url is not None - if proxy_url.scheme != "https": - return - - self._check_loop_for_start_tls() - - def _check_loop_for_start_tls(self) -> None: - try: - self._loop.start_tls - except AttributeError as attr_exc: - raise RuntimeError( - "An HTTPS request is being sent through an HTTPS proxy. " - "This needs support for TLS in TLS but it is not implemented " - "in your runtime for the stdlib asyncio.\n\n" - "Please upgrade to Python 3.11 or higher. For more details, " - "please see:\n" - "* https://bugs.python.org/issue37179\n" - "* https://github.com/python/cpython/pull/28073\n" - "* https://docs.aiohttp.org/en/stable/" - "client_advanced.html#proxy-support\n" - "* https://github.com/aio-libs/aiohttp/discussions/6044\n", - ) from attr_exc - - def _loop_supports_start_tls(self) -> bool: - try: - self._check_loop_for_start_tls() - except RuntimeError: - return False - else: - return True - - def _warn_about_tls_in_tls( - self, - underlying_transport: asyncio.Transport, - req: ClientRequest, - ) -> None: - """Issue a warning if the requested URL has HTTPS scheme.""" - if req.request_info.url.scheme != "https": - return - - asyncio_supports_tls_in_tls = getattr( - underlying_transport, - "_start_tls_compatible", - False, - ) - - if asyncio_supports_tls_in_tls: - return - - warnings.warn( - "An HTTPS request is being sent through an HTTPS proxy. " - "This support for TLS in TLS is known to be disabled " - "in the stdlib asyncio (Python <3.11). This is why you'll probably see " - "an error in the log below.\n\n" - "It is possible to enable it via monkeypatching. 
" - "For more details, see:\n" - "* https://bugs.python.org/issue37179\n" - "* https://github.com/python/cpython/pull/28073\n\n" - "You can temporarily patch this as follows:\n" - "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n" - "* https://github.com/aio-libs/aiohttp/discussions/6044\n", - RuntimeWarning, - source=self, - # Why `4`? At least 3 of the calls in the stack originate - # from the methods in this class. - stacklevel=3, - ) - - async def _start_tls_connection( - self, - underlying_transport: asyncio.Transport, - req: ClientRequest, - timeout: "ClientTimeout", - client_error: Type[Exception] = ClientConnectorError, - ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: - """Wrap the raw TCP transport with TLS.""" - tls_proto = self._factory() # Create a brand new proto for TLS - - # Safety of the `cast()` call here is based on the fact that - # internally `_get_ssl_context()` only returns `None` when - # `req.is_ssl()` evaluates to `False` which is never gonna happen - # in this code path. Of course, it's rather fragile - # maintainability-wise but this is to be solved separately. - sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req)) - - try: - async with ceil_timeout( - timeout.sock_connect, ceil_threshold=timeout.ceil_threshold - ): - try: - tls_transport = await self._loop.start_tls( - underlying_transport, - tls_proto, - sslcontext, - server_hostname=req.server_hostname or req.host, - ssl_handshake_timeout=timeout.total, - ) - except BaseException: - # We need to close the underlying transport since - # `start_tls()` probably failed before it had a - # chance to do this: - underlying_transport.close() - raise - except cert_errors as exc: - raise ClientConnectorCertificateError(req.connection_key, exc) from exc - except ssl_errors as exc: - raise ClientConnectorSSLError(req.connection_key, exc) from exc - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - raise client_error(req.connection_key, exc) from exc - except TypeError as type_err: - # Example cause looks like this: - # TypeError: transport is not supported by start_tls() - - raise ClientConnectionError( - "Cannot initialize a TLS-in-TLS connection to host " - f"{req.host!s}:{req.port:d} through an underlying connection " - f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} " - f"[{type_err!s}]" - ) from type_err - else: - if tls_transport is None: - msg = "Failed to start TLS (possibly caused by closing transport)" - raise client_error(req.connection_key, OSError(msg)) - tls_proto.connection_made( - tls_transport - ) # Kick the state machine of the new TLS protocol - - return tls_transport, tls_proto - - async def _create_direct_connection( - self, - req: ClientRequest, - traces: List["Trace"], - timeout: "ClientTimeout", - *, - client_error: Type[Exception] = ClientConnectorError, - ) -> Tuple[asyncio.Transport, ResponseHandler]: - sslcontext = self._get_ssl_context(req) - fingerprint = self._get_fingerprint(req) - - host = req.url.raw_host - assert host is not None - # Replace multiple trailing dots with a single one. - # A trailing dot is only present for fully-qualified domain names. - # See https://github.com/aio-libs/aiohttp/pull/7364. - if host.endswith(".."): - host = host.rstrip(".") + "." 
- port = req.port - assert port is not None - host_resolved = asyncio.ensure_future( - self._resolve_host(host, port, traces=traces), loop=self._loop - ) - try: - # Cancelling this lookup should not cancel the underlying lookup - # or else the cancel event will get broadcast to all the waiters - # across all connections. - hosts = await asyncio.shield(host_resolved) - except asyncio.CancelledError: - - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: - with suppress(Exception, asyncio.CancelledError): - fut.result() - - host_resolved.add_done_callback(drop_exception) - raise - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - # in case of proxy it is not ClientProxyConnectionError - # it is problem of resolving proxy ip itself - raise ClientConnectorError(req.connection_key, exc) from exc - - last_exc: Optional[Exception] = None - - for hinfo in hosts: - host = hinfo["host"] - port = hinfo["port"] - - # Strip trailing dots, certificates contain FQDN without dots. - # See https://github.com/aio-libs/aiohttp/issues/3636 - server_hostname = ( - (req.server_hostname or hinfo["hostname"]).rstrip(".") - if sslcontext - else None - ) - - try: - transp, proto = await self._wrap_create_connection( - self._factory, - host, - port, - timeout=timeout, - ssl=sslcontext, - family=hinfo["family"], - proto=hinfo["proto"], - flags=hinfo["flags"], - server_hostname=server_hostname, - local_addr=self._local_addr, - req=req, - client_error=client_error, - ) - except ClientConnectorError as exc: - last_exc = exc - continue - - if req.is_ssl() and fingerprint: - try: - fingerprint.check(transp) - except ServerFingerprintMismatch as exc: - transp.close() - if not self._cleanup_closed_disabled: - self._cleanup_closed_transports.append(transp) - last_exc = exc - continue - - return transp, proto - else: - assert last_exc is not None - raise last_exc - - async def _create_proxy_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: - self._fail_on_no_start_tls(req) - runtime_has_start_tls = self._loop_supports_start_tls() - - headers: Dict[str, str] = {} - if req.proxy_headers is not None: - headers = req.proxy_headers # type: ignore[assignment] - headers[hdrs.HOST] = req.headers[hdrs.HOST] - - url = req.proxy - assert url is not None - proxy_req = ClientRequest( - hdrs.METH_GET, - url, - headers=headers, - auth=req.proxy_auth, - loop=self._loop, - ssl=req.ssl, - ) - - # create connection to proxy server - transport, proto = await self._create_direct_connection( - proxy_req, [], timeout, client_error=ClientProxyConnectionError - ) - - # Many HTTP proxies has buggy keepalive support. Let's not - # reuse connection but close it after processing every - # response. 
- proto.force_close() - - auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) - if auth is not None: - if not req.is_ssl(): - req.headers[hdrs.PROXY_AUTHORIZATION] = auth - else: - proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth - - if req.is_ssl(): - if runtime_has_start_tls: - self._warn_about_tls_in_tls(transport, req) - - # For HTTPS requests over HTTP proxy - # we must notify proxy to tunnel connection - # so we send CONNECT command: - # CONNECT www.python.org:443 HTTP/1.1 - # Host: www.python.org - # - # next we must do TLS handshake and so on - # to do this we must wrap raw socket into secure one - # asyncio handles this perfectly - proxy_req.method = hdrs.METH_CONNECT - proxy_req.url = req.url - key = attr.evolve( - req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None - ) - conn = Connection(self, key, proto, self._loop) - proxy_resp = await proxy_req.send(conn) - try: - protocol = conn._protocol - assert protocol is not None - - # read_until_eof=True will ensure the connection isn't closed - # once the response is received and processed allowing - # START_TLS to work on the connection below. - protocol.set_response_params( - read_until_eof=runtime_has_start_tls, - timeout_ceil_threshold=self._timeout_ceil_threshold, - ) - resp = await proxy_resp.start(conn) - except BaseException: - proxy_resp.close() - conn.close() - raise - else: - conn._protocol = None - conn._transport = None - try: - if resp.status != 200: - message = resp.reason - if message is None: - message = HTTPStatus(resp.status).phrase - raise ClientHttpProxyError( - proxy_resp.request_info, - resp.history, - status=resp.status, - message=message, - headers=resp.headers, - ) - if not runtime_has_start_tls: - rawsock = transport.get_extra_info("socket", default=None) - if rawsock is None: - raise RuntimeError( - "Transport does not expose socket instance" - ) - # Duplicate the socket, so now we can close proxy transport - rawsock = rawsock.dup() - except BaseException: - # It shouldn't be closed in `finally` because it's fed to - # `loop.start_tls()` and the docs say not to touch it after - # passing there. - transport.close() - raise - finally: - if not runtime_has_start_tls: - transport.close() - - if not runtime_has_start_tls: - # HTTP proxy with support for upgrade to HTTPS - sslcontext = self._get_ssl_context(req) - return await self._wrap_create_connection( - self._factory, - timeout=timeout, - ssl=sslcontext, - sock=rawsock, - server_hostname=req.host, - req=req, - ) - - return await self._start_tls_connection( - # Access the old transport for the last time before it's - # closed and forgotten forever: - transport, - req=req, - timeout=timeout, - ) - finally: - proxy_resp.close() - - return transport, proto - - -class UnixConnector(BaseConnector): - """Unix socket connector. - - path - Unix socket path. - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - loop - Optional event loop. 
- """ - - def __init__( - self, - path: str, - force_close: bool = False, - keepalive_timeout: Union[object, float, None] = sentinel, - limit: int = 100, - limit_per_host: int = 0, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - super().__init__( - force_close=force_close, - keepalive_timeout=keepalive_timeout, - limit=limit, - limit_per_host=limit_per_host, - loop=loop, - ) - self._path = path - - @property - def path(self) -> str: - """Path to unix socket.""" - return self._path - - async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - try: - async with ceil_timeout( - timeout.sock_connect, ceil_threshold=timeout.ceil_threshold - ): - _, proto = await self._loop.create_unix_connection( - self._factory, self._path - ) - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc - - return proto - - -class NamedPipeConnector(BaseConnector): - """Named pipe connector. - - Only supported by the proactor event loop. - See also: https://docs.python.org/3/library/asyncio-eventloop.html - - path - Windows named pipe path. - keepalive_timeout - (optional) Keep-alive timeout. - force_close - Set to True to force close and do reconnect - after each request (and between redirects). - limit - The total number of simultaneous connections. - limit_per_host - Number of simultaneous connections to one host. - loop - Optional event loop. - """ - - def __init__( - self, - path: str, - force_close: bool = False, - keepalive_timeout: Union[object, float, None] = sentinel, - limit: int = 100, - limit_per_host: int = 0, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - super().__init__( - force_close=force_close, - keepalive_timeout=keepalive_timeout, - limit=limit, - limit_per_host=limit_per_host, - loop=loop, - ) - if not isinstance( - self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] - ): - raise RuntimeError( - "Named Pipes only available in proactor " "loop under windows" - ) - self._path = path - - @property - def path(self) -> str: - """Path to the named pipe.""" - return self._path - - async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> ResponseHandler: - try: - async with ceil_timeout( - timeout.sock_connect, ceil_threshold=timeout.ceil_threshold - ): - _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] - self._factory, self._path - ) - # the drain is required so that the connection_made is called - # and transport is set otherwise it is not set before the - # `assert conn.transport is not None` - # in client.py's _request method - await asyncio.sleep(0) - # other option is to manually set transport like - # `proto.transport = trans` - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - raise - raise ClientConnectorError(req.connection_key, exc) from exc - - return cast(ResponseHandler, proto) diff --git a/.venv/Lib/site-packages/aiohttp/cookiejar.py b/.venv/Lib/site-packages/aiohttp/cookiejar.py deleted file mode 100644 index a348f11..0000000 --- a/.venv/Lib/site-packages/aiohttp/cookiejar.py +++ /dev/null @@ -1,419 +0,0 @@ -import asyncio -import calendar -import contextlib -import datetime -import os # noqa -import pathlib -import pickle -import re -import time -from collections import defaultdict -from http.cookies import 
BaseCookie, Morsel, SimpleCookie -from math import ceil -from typing import ( # noqa - DefaultDict, - Dict, - Iterable, - Iterator, - List, - Mapping, - Optional, - Set, - Tuple, - Union, - cast, -) - -from yarl import URL - -from .abc import AbstractCookieJar, ClearCookiePredicate -from .helpers import is_ip_address -from .typedefs import LooseCookies, PathLike, StrOrURL - -__all__ = ("CookieJar", "DummyCookieJar") - - -CookieItem = Union[str, "Morsel[str]"] - - -class CookieJar(AbstractCookieJar): - """Implements cookie storage adhering to RFC 6265.""" - - DATE_TOKENS_RE = re.compile( - r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" - r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)" - ) - - DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") - - DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") - - DATE_MONTH_RE = re.compile( - "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", - re.I, - ) - - DATE_YEAR_RE = re.compile(r"(\d{2,4})") - - # calendar.timegm() fails for timestamps after datetime.datetime.max - # Minus one as a loss of precision occurs when timestamp() is called. - MAX_TIME = ( - int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1 - ) - try: - calendar.timegm(time.gmtime(MAX_TIME)) - except (OSError, ValueError): - # Hit the maximum representable time on Windows - # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 - # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere - MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) - except OverflowError: - # #4515: datetime.max may not be representable on 32-bit platforms - MAX_TIME = 2**31 - 1 - # Avoid minuses in the future, 3x faster - SUB_MAX_TIME = MAX_TIME - 1 - - def __init__( - self, - *, - unsafe: bool = False, - quote_cookie: bool = True, - treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - super().__init__(loop=loop) - self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( - SimpleCookie - ) - self._host_only_cookies: Set[Tuple[str, str]] = set() - self._unsafe = unsafe - self._quote_cookie = quote_cookie - if treat_as_secure_origin is None: - treat_as_secure_origin = [] - elif isinstance(treat_as_secure_origin, URL): - treat_as_secure_origin = [treat_as_secure_origin.origin()] - elif isinstance(treat_as_secure_origin, str): - treat_as_secure_origin = [URL(treat_as_secure_origin).origin()] - else: - treat_as_secure_origin = [ - URL(url).origin() if isinstance(url, str) else url.origin() - for url in treat_as_secure_origin - ] - self._treat_as_secure_origin = treat_as_secure_origin - self._next_expiration: float = ceil(time.time()) - self._expirations: Dict[Tuple[str, str, str], float] = {} - - def save(self, file_path: PathLike) -> None: - file_path = pathlib.Path(file_path) - with file_path.open(mode="wb") as f: - pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) - - def load(self, file_path: PathLike) -> None: - file_path = pathlib.Path(file_path) - with file_path.open(mode="rb") as f: - self._cookies = pickle.load(f) - - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: - if predicate is None: - self._next_expiration = ceil(time.time()) - self._cookies.clear() - self._host_only_cookies.clear() - self._expirations.clear() - return - - to_del = [] - now = time.time() - for (domain, path), cookie in self._cookies.items(): - for name, morsel in cookie.items():
- key = (domain, path, name) - if ( - key in self._expirations and self._expirations[key] <= now - ) or predicate(morsel): - to_del.append(key) - - for domain, path, name in to_del: - self._host_only_cookies.discard((domain, name)) - key = (domain, path, name) - if key in self._expirations: - del self._expirations[(domain, path, name)] - self._cookies[(domain, path)].pop(name, None) - - self._next_expiration = ( - min(*self._expirations.values(), self.SUB_MAX_TIME) + 1 - if self._expirations - else self.MAX_TIME - ) - - def clear_domain(self, domain: str) -> None: - self.clear(lambda x: self._is_domain_match(domain, x["domain"])) - - def __iter__(self) -> "Iterator[Morsel[str]]": - self._do_expiration() - for val in self._cookies.values(): - yield from val.values() - - def __len__(self) -> int: - return sum(1 for i in self) - - def _do_expiration(self) -> None: - self.clear(lambda x: False) - - def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: - self._next_expiration = min(self._next_expiration, when) - self._expirations[(domain, path, name)] = when - - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - """Update cookies.""" - hostname = response_url.raw_host - - if not self._unsafe and is_ip_address(hostname): - # Don't accept cookies from IPs - return - - if isinstance(cookies, Mapping): - cookies = cookies.items() - - for name, cookie in cookies: - if not isinstance(cookie, Morsel): - tmp = SimpleCookie() - tmp[name] = cookie # type: ignore[assignment] - cookie = tmp[name] - - domain = cookie["domain"] - - # ignore domains with trailing dots - if domain.endswith("."): - domain = "" - del cookie["domain"] - - if not domain and hostname is not None: - # Set the cookie's domain to the response hostname - # and set its host-only-flag - self._host_only_cookies.add((hostname, name)) - domain = cookie["domain"] = hostname - - if domain.startswith("."): - # Remove leading dot - domain = domain[1:] - cookie["domain"] = domain - - if hostname and not self._is_domain_match(domain, hostname): - # Setting cookies for different domains is not allowed - continue - - path = cookie["path"] - if not path or not path.startswith("/"): - # Set the cookie's path to the response path - path = response_url.path - if not path.startswith("/"): - path = "/" - else: - # Cut everything from the last slash to the end - path = "/" + path[1 : path.rfind("/")] - cookie["path"] = path - - max_age = cookie["max-age"] - if max_age: - try: - delta_seconds = int(max_age) - max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) - self._expire_cookie(max_age_expiration, domain, path, name) - except ValueError: - cookie["max-age"] = "" - - else: - expires = cookie["expires"] - if expires: - expire_time = self._parse_date(expires) - if expire_time: - self._expire_cookie(expire_time, domain, path, name) - else: - cookie["expires"] = "" - - self._cookies[(domain, path)][name] = cookie - - self._do_expiration() - - def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": - """Returns this jar's cookies filtered by their attributes.""" - filtered: Union[SimpleCookie, "BaseCookie[str]"] = ( - SimpleCookie() if self._quote_cookie else BaseCookie() - ) - if not self._cookies: - # Skip do_expiration() if there are no cookies. - return filtered - self._do_expiration() - if not self._cookies: - # Skip rest of function if no non-expired cookies. 
- return filtered - request_url = URL(request_url) - hostname = request_url.raw_host or "" - - is_not_secure = request_url.scheme not in ("https", "wss") - if is_not_secure and self._treat_as_secure_origin: - request_origin = URL() - with contextlib.suppress(ValueError): - request_origin = request_url.origin() - is_not_secure = request_origin not in self._treat_as_secure_origin - - # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 - for cookie in sorted(self, key=lambda c: len(c["path"])): - name = cookie.key - domain = cookie["domain"] - - # Send shared cookies - if not domain: - filtered[name] = cookie.value - continue - - if not self._unsafe and is_ip_address(hostname): - continue - - if (domain, name) in self._host_only_cookies: - if domain != hostname: - continue - elif not self._is_domain_match(domain, hostname): - continue - - if not self._is_path_match(request_url.path, cookie["path"]): - continue - - if is_not_secure and cookie["secure"]: - continue - - # It's critical we use the Morsel so the coded_value - # (based on cookie version) is preserved - mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) - mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) - filtered[name] = mrsl_val - - return filtered - - @staticmethod - def _is_domain_match(domain: str, hostname: str) -> bool: - """Implements domain matching adhering to RFC 6265.""" - if hostname == domain: - return True - - if not hostname.endswith(domain): - return False - - non_matching = hostname[: -len(domain)] - - if not non_matching.endswith("."): - return False - - return not is_ip_address(hostname) - - @staticmethod - def _is_path_match(req_path: str, cookie_path: str) -> bool: - """Implements path matching adhering to RFC 6265.""" - if not req_path.startswith("/"): - req_path = "/" - - if req_path == cookie_path: - return True - - if not req_path.startswith(cookie_path): - return False - - if cookie_path.endswith("/"): - return True - - non_matching = req_path[len(cookie_path) :] - - return non_matching.startswith("/") - - @classmethod - def _parse_date(cls, date_str: str) -> Optional[int]: - """Implements date string parsing adhering to RFC 6265.""" - if not date_str: - return None - - found_time = False - found_day = False - found_month = False - found_year = False - - hour = minute = second = 0 - day = 0 - month = 0 - year = 0 - - for token_match in cls.DATE_TOKENS_RE.finditer(date_str): - - token = token_match.group("token") - - if not found_time: - time_match = cls.DATE_HMS_TIME_RE.match(token) - if time_match: - found_time = True - hour, minute, second = (int(s) for s in time_match.groups()) - continue - - if not found_day: - day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) - if day_match: - found_day = True - day = int(day_match.group()) - continue - - if not found_month: - month_match = cls.DATE_MONTH_RE.match(token) - if month_match: - found_month = True - assert month_match.lastindex is not None - month = month_match.lastindex - continue - - if not found_year: - year_match = cls.DATE_YEAR_RE.match(token) - if year_match: - found_year = True - year = int(year_match.group()) - - if 70 <= year <= 99: - year += 1900 - elif 0 <= year <= 69: - year += 2000 - - if False in (found_day, found_month, found_year, found_time): - return None - - if not 1 <= day <= 31: - return None - - if year < 1601 or hour > 23 or minute > 59 or second > 59: - return None - - return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1)) - - -class DummyCookieJar(AbstractCookieJar): - 
"""Implements a dummy cookie storage. - - It can be used with the ClientSession when no cookie processing is needed. - - """ - - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - super().__init__(loop=loop) - - def __iter__(self) -> "Iterator[Morsel[str]]": - while False: - yield None - - def __len__(self) -> int: - return 0 - - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: - pass - - def clear_domain(self, domain: str) -> None: - pass - - def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: - pass - - def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": - return SimpleCookie() diff --git a/.venv/Lib/site-packages/aiohttp/formdata.py b/.venv/Lib/site-packages/aiohttp/formdata.py deleted file mode 100644 index e7cd24c..0000000 --- a/.venv/Lib/site-packages/aiohttp/formdata.py +++ /dev/null @@ -1,172 +0,0 @@ -import io -from typing import Any, Iterable, List, Optional -from urllib.parse import urlencode - -from multidict import MultiDict, MultiDictProxy - -from . import hdrs, multipart, payload -from .helpers import guess_filename -from .payload import Payload - -__all__ = ("FormData",) - - -class FormData: - """Helper class for form body generation. - - Supports multipart/form-data and application/x-www-form-urlencoded. - """ - - def __init__( - self, - fields: Iterable[Any] = (), - quote_fields: bool = True, - charset: Optional[str] = None, - ) -> None: - self._writer = multipart.MultipartWriter("form-data") - self._fields: List[Any] = [] - self._is_multipart = False - self._is_processed = False - self._quote_fields = quote_fields - self._charset = charset - - if isinstance(fields, dict): - fields = list(fields.items()) - elif not isinstance(fields, (list, tuple)): - fields = (fields,) - self.add_fields(*fields) - - @property - def is_multipart(self) -> bool: - return self._is_multipart - - def add_field( - self, - name: str, - value: Any, - *, - content_type: Optional[str] = None, - filename: Optional[str] = None, - content_transfer_encoding: Optional[str] = None, - ) -> None: - - if isinstance(value, io.IOBase): - self._is_multipart = True - elif isinstance(value, (bytes, bytearray, memoryview)): - if filename is None and content_transfer_encoding is None: - filename = name - - type_options: MultiDict[str] = MultiDict({"name": name}) - if filename is not None and not isinstance(filename, str): - raise TypeError( - "filename must be an instance of str. " "Got: %s" % filename - ) - if filename is None and isinstance(value, io.IOBase): - filename = guess_filename(value, name) - if filename is not None: - type_options["filename"] = filename - self._is_multipart = True - - headers = {} - if content_type is not None: - if not isinstance(content_type, str): - raise TypeError( - "content_type must be an instance of str. " "Got: %s" % content_type - ) - headers[hdrs.CONTENT_TYPE] = content_type - self._is_multipart = True - if content_transfer_encoding is not None: - if not isinstance(content_transfer_encoding, str): - raise TypeError( - "content_transfer_encoding must be an instance" - " of str. 
Got: %s" % content_transfer_encoding - ) - headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding - self._is_multipart = True - - self._fields.append((type_options, headers, value)) - - def add_fields(self, *fields: Any) -> None: - to_add = list(fields) - - while to_add: - rec = to_add.pop(0) - - if isinstance(rec, io.IOBase): - k = guess_filename(rec, "unknown") - self.add_field(k, rec) # type: ignore[arg-type] - - elif isinstance(rec, (MultiDictProxy, MultiDict)): - to_add.extend(rec.items()) - - elif isinstance(rec, (list, tuple)) and len(rec) == 2: - k, fp = rec - self.add_field(k, fp) # type: ignore[arg-type] - - else: - raise TypeError( - "Only io.IOBase, multidict and (name, file) " - "pairs allowed, use .add_field() for passing " - "more complex parameters, got {!r}".format(rec) - ) - - def _gen_form_urlencoded(self) -> payload.BytesPayload: - # form data (x-www-form-urlencoded) - data = [] - for type_options, _, value in self._fields: - data.append((type_options["name"], value)) - - charset = self._charset if self._charset is not None else "utf-8" - - if charset == "utf-8": - content_type = "application/x-www-form-urlencoded" - else: - content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset - - return payload.BytesPayload( - urlencode(data, doseq=True, encoding=charset).encode(), - content_type=content_type, - ) - - def _gen_form_data(self) -> multipart.MultipartWriter: - """Encode a list of fields using the multipart/form-data MIME format""" - if self._is_processed: - raise RuntimeError("Form data has been processed already") - for dispparams, headers, value in self._fields: - try: - if hdrs.CONTENT_TYPE in headers: - part = payload.get_payload( - value, - content_type=headers[hdrs.CONTENT_TYPE], - headers=headers, - encoding=self._charset, - ) - else: - part = payload.get_payload( - value, headers=headers, encoding=self._charset - ) - except Exception as exc: - raise TypeError( - "Can not serialize value type: %r\n " - "headers: %r\n value: %r" % (type(value), headers, value) - ) from exc - - if dispparams: - part.set_content_disposition( - "form-data", quote_fields=self._quote_fields, **dispparams - ) - # FIXME cgi.FieldStorage doesn't likes body parts with - # Content-Length which were sent via chunked transfer encoding - assert part.headers is not None - part.headers.popall(hdrs.CONTENT_LENGTH, None) - - self._writer.append_payload(part) - - self._is_processed = True - return self._writer - - def __call__(self) -> Payload: - if self._is_multipart: - return self._gen_form_data() - else: - return self._gen_form_urlencoded() diff --git a/.venv/Lib/site-packages/aiohttp/hdrs.py b/.venv/Lib/site-packages/aiohttp/hdrs.py deleted file mode 100644 index 2f1f5e0..0000000 --- a/.venv/Lib/site-packages/aiohttp/hdrs.py +++ /dev/null @@ -1,108 +0,0 @@ -"""HTTP Headers constants.""" - -# After changing the file content call ./tools/gen.py -# to regenerate the headers parser -from typing import Final, Set - -from multidict import istr - -METH_ANY: Final[str] = "*" -METH_CONNECT: Final[str] = "CONNECT" -METH_HEAD: Final[str] = "HEAD" -METH_GET: Final[str] = "GET" -METH_DELETE: Final[str] = "DELETE" -METH_OPTIONS: Final[str] = "OPTIONS" -METH_PATCH: Final[str] = "PATCH" -METH_POST: Final[str] = "POST" -METH_PUT: Final[str] = "PUT" -METH_TRACE: Final[str] = "TRACE" - -METH_ALL: Final[Set[str]] = { - METH_CONNECT, - METH_HEAD, - METH_GET, - METH_DELETE, - METH_OPTIONS, - METH_PATCH, - METH_POST, - METH_PUT, - METH_TRACE, -} - -ACCEPT: Final[istr] = 
istr("Accept") -ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset") -ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding") -ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language") -ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges") -ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age") -ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials") -ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers") -ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods") -ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin") -ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers") -ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers") -ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method") -AGE: Final[istr] = istr("Age") -ALLOW: Final[istr] = istr("Allow") -AUTHORIZATION: Final[istr] = istr("Authorization") -CACHE_CONTROL: Final[istr] = istr("Cache-Control") -CONNECTION: Final[istr] = istr("Connection") -CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition") -CONTENT_ENCODING: Final[istr] = istr("Content-Encoding") -CONTENT_LANGUAGE: Final[istr] = istr("Content-Language") -CONTENT_LENGTH: Final[istr] = istr("Content-Length") -CONTENT_LOCATION: Final[istr] = istr("Content-Location") -CONTENT_MD5: Final[istr] = istr("Content-MD5") -CONTENT_RANGE: Final[istr] = istr("Content-Range") -CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding") -CONTENT_TYPE: Final[istr] = istr("Content-Type") -COOKIE: Final[istr] = istr("Cookie") -DATE: Final[istr] = istr("Date") -DESTINATION: Final[istr] = istr("Destination") -DIGEST: Final[istr] = istr("Digest") -ETAG: Final[istr] = istr("Etag") -EXPECT: Final[istr] = istr("Expect") -EXPIRES: Final[istr] = istr("Expires") -FORWARDED: Final[istr] = istr("Forwarded") -FROM: Final[istr] = istr("From") -HOST: Final[istr] = istr("Host") -IF_MATCH: Final[istr] = istr("If-Match") -IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since") -IF_NONE_MATCH: Final[istr] = istr("If-None-Match") -IF_RANGE: Final[istr] = istr("If-Range") -IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since") -KEEP_ALIVE: Final[istr] = istr("Keep-Alive") -LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID") -LAST_MODIFIED: Final[istr] = istr("Last-Modified") -LINK: Final[istr] = istr("Link") -LOCATION: Final[istr] = istr("Location") -MAX_FORWARDS: Final[istr] = istr("Max-Forwards") -ORIGIN: Final[istr] = istr("Origin") -PRAGMA: Final[istr] = istr("Pragma") -PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate") -PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization") -RANGE: Final[istr] = istr("Range") -REFERER: Final[istr] = istr("Referer") -RETRY_AFTER: Final[istr] = istr("Retry-After") -SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept") -SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version") -SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol") -SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions") -SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key") -SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1") -SERVER: Final[istr] = istr("Server") -SET_COOKIE: Final[istr] = istr("Set-Cookie") -TE: Final[istr] = istr("TE") -TRAILER: Final[istr] = istr("Trailer") -TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding") -UPGRADE: Final[istr] = istr("Upgrade") -URI: Final[istr] = 
istr("URI") -USER_AGENT: Final[istr] = istr("User-Agent") -VARY: Final[istr] = istr("Vary") -VIA: Final[istr] = istr("Via") -WANT_DIGEST: Final[istr] = istr("Want-Digest") -WARNING: Final[istr] = istr("Warning") -WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate") -X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For") -X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host") -X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto") diff --git a/.venv/Lib/site-packages/aiohttp/helpers.py b/.venv/Lib/site-packages/aiohttp/helpers.py deleted file mode 100644 index a5c762e..0000000 --- a/.venv/Lib/site-packages/aiohttp/helpers.py +++ /dev/null @@ -1,999 +0,0 @@ -"""Various helper functions""" - -import asyncio -import base64 -import binascii -import contextlib -import datetime -import enum -import functools -import inspect -import netrc -import os -import platform -import re -import sys -import time -import warnings -import weakref -from collections import namedtuple -from contextlib import suppress -from email.parser import HeaderParser -from email.utils import parsedate -from math import ceil -from pathlib import Path -from types import TracebackType -from typing import ( - Any, - Callable, - ContextManager, - Dict, - Generator, - Generic, - Iterable, - Iterator, - List, - Mapping, - Optional, - Pattern, - Protocol, - Tuple, - Type, - TypeVar, - Union, - get_args, - overload, -) -from urllib.parse import quote -from urllib.request import getproxies, proxy_bypass - -import attr -from multidict import MultiDict, MultiDictProxy, MultiMapping -from yarl import URL - -from . import hdrs -from .log import client_logger, internal_logger - -if sys.version_info >= (3, 11): - import asyncio as async_timeout -else: - import async_timeout - -__all__ = ("BasicAuth", "ChainMapProxy", "ETag") - -IS_MACOS = platform.system() == "Darwin" -IS_WINDOWS = platform.system() == "Windows" - -PY_310 = sys.version_info >= (3, 10) -PY_311 = sys.version_info >= (3, 11) - - -_T = TypeVar("_T") -_S = TypeVar("_S") - -_SENTINEL = enum.Enum("_SENTINEL", "sentinel") -sentinel = _SENTINEL.sentinel - -NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) - -DEBUG = sys.flags.dev_mode or ( - not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) -) - - -CHAR = {chr(i) for i in range(0, 128)} -CTL = {chr(i) for i in range(0, 32)} | { - chr(127), -} -SEPARATORS = { - "(", - ")", - "<", - ">", - "@", - ",", - ";", - ":", - "\\", - '"', - "/", - "[", - "]", - "?", - "=", - "{", - "}", - " ", - chr(9), -} -TOKEN = CHAR ^ CTL ^ SEPARATORS - - -class noop: - def __await__(self) -> Generator[None, None, None]: - yield - - -class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): - """Http basic authentication helper.""" - - def __new__( - cls, login: str, password: str = "", encoding: str = "latin1" - ) -> "BasicAuth": - if login is None: - raise ValueError("None is not allowed as login value") - - if password is None: - raise ValueError("None is not allowed as password value") - - if ":" in login: - raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') - - return super().__new__(cls, login, password, encoding) - - @classmethod - def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": - """Create a BasicAuth object from an Authorization HTTP header.""" - try: - auth_type, encoded_credentials = auth_header.split(" ", 1) - except ValueError: - raise ValueError("Could not parse authorization header.") - - if auth_type.lower() != "basic": 
- raise ValueError("Unknown authorization method %s" % auth_type) - - try: - decoded = base64.b64decode( - encoded_credentials.encode("ascii"), validate=True - ).decode(encoding) - except binascii.Error: - raise ValueError("Invalid base64 encoding.") - - try: - # RFC 2617 HTTP Authentication - # https://www.ietf.org/rfc/rfc2617.txt - # the colon must be present, but the username and password may be - # otherwise blank. - username, password = decoded.split(":", 1) - except ValueError: - raise ValueError("Invalid credentials.") - - return cls(username, password, encoding=encoding) - - @classmethod - def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: - """Create BasicAuth from url.""" - if not isinstance(url, URL): - raise TypeError("url should be yarl.URL instance") - if url.user is None: - return None - return cls(url.user, url.password or "", encoding=encoding) - - def encode(self) -> str: - """Encode credentials.""" - creds = (f"{self.login}:{self.password}").encode(self.encoding) - return "Basic %s" % base64.b64encode(creds).decode(self.encoding) - - -def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - auth = BasicAuth.from_url(url) - if auth is None: - return url, None - else: - return url.with_user(None), auth - - -def netrc_from_env() -> Optional[netrc.netrc]: - """Load netrc from file. - - Attempt to load it from the path specified by the env-var - NETRC or in the default location in the user's home directory. - - Returns None if it couldn't be found or fails to parse. - """ - netrc_env = os.environ.get("NETRC") - - if netrc_env is not None: - netrc_path = Path(netrc_env) - else: - try: - home_dir = Path.home() - except RuntimeError as e: # pragma: no cover - # if pathlib can't resolve home, it may raise a RuntimeError - client_logger.debug( - "Could not resolve home directory when " - "trying to look for .netrc file: %s", - e, - ) - return None - - netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") - - try: - return netrc.netrc(str(netrc_path)) - except netrc.NetrcParseError as e: - client_logger.warning("Could not parse .netrc file: %s", e) - except OSError as e: - netrc_exists = False - with contextlib.suppress(OSError): - netrc_exists = netrc_path.is_file() - # we couldn't read the file (doesn't exist, permissions, etc.) - if netrc_env or netrc_exists: - # only warn if the environment wanted us to load it, - # or it appears like the default file does actually exist - client_logger.warning("Could not read .netrc file: %s", e) - - return None - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ProxyInfo: - proxy: URL - proxy_auth: Optional[BasicAuth] - - -def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth: - """ - Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``. - - :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no - entry is found for the ``host``. - """ - if netrc_obj is None: - raise LookupError("No .netrc file found") - auth_from_netrc = netrc_obj.authenticators(host) - - if auth_from_netrc is None: - raise LookupError(f"No entry for {host!s} found in the `.netrc` file.") - login, account, password = auth_from_netrc - - # TODO(PY311): username = login or account - # Up to python 3.10, account could be None if not specified, - # and login will be empty string if not specified. From 3.11, - # login and account will be empty string if not specified. 
- username = login if (login or account is None) else account - - # TODO(PY311): Remove this, as password will be empty string - # if not specified - if password is None: - password = "" - - return BasicAuth(username, password) - - -def proxies_from_env() -> Dict[str, ProxyInfo]: - proxy_urls = { - k: URL(v) - for k, v in getproxies().items() - if k in ("http", "https", "ws", "wss") - } - netrc_obj = netrc_from_env() - stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} - ret = {} - for proto, val in stripped.items(): - proxy, auth = val - if proxy.scheme in ("https", "wss"): - client_logger.warning( - "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy - ) - continue - if netrc_obj and auth is None: - if proxy.host is not None: - try: - auth = basicauth_from_netrc(netrc_obj, proxy.host) - except LookupError: - auth = None - ret[proto] = ProxyInfo(proxy, auth) - return ret - - -def current_task( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> "Optional[asyncio.Task[Any]]": - return asyncio.current_task(loop=loop) - - -def get_running_loop( - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> asyncio.AbstractEventLoop: - if loop is None: - loop = asyncio.get_event_loop() - if not loop.is_running(): - warnings.warn( - "The object should be created within an async function", - DeprecationWarning, - stacklevel=3, - ) - if loop.get_debug(): - internal_logger.warning( - "The object should be created within an async function", stack_info=True - ) - return loop - - -def isasyncgenfunction(obj: Any) -> bool: - func = getattr(inspect, "isasyncgenfunction", None) - if func is not None: - return func(obj) # type: ignore[no-any-return] - else: - return False - - -def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: - """Get a permitted proxy for the given URL from the env.""" - if url.host is not None and proxy_bypass(url.host): - raise LookupError(f"Proxying is disallowed for `{url.host!r}`") - - proxies_in_env = proxies_from_env() - try: - proxy_info = proxies_in_env[url.scheme] - except KeyError: - raise LookupError(f"No proxies found for `{url!s}` in the env") - else: - return proxy_info.proxy, proxy_info.proxy_auth - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class MimeType: - type: str - subtype: str - suffix: str - parameters: "MultiDictProxy[str]" - - -@functools.lru_cache(maxsize=56) -def parse_mimetype(mimetype: str) -> MimeType: - """Parses a MIME type into its components. - - mimetype is a MIME type string. - - Returns a MimeType object. 
- - Example: - - >>> parse_mimetype('text/html; charset=utf-8') - MimeType(type='text', subtype='html', suffix='', - parameters={'charset': 'utf-8'}) - - """ - if not mimetype: - return MimeType( - type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) - ) - - parts = mimetype.split(";") - params: MultiDict[str] = MultiDict() - for item in parts[1:]: - if not item: - continue - key, _, value = item.partition("=") - params.add(key.lower().strip(), value.strip(' "')) - - fulltype = parts[0].strip().lower() - if fulltype == "*": - fulltype = "*/*" - - mtype, _, stype = fulltype.partition("/") - stype, _, suffix = stype.partition("+") - - return MimeType( - type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) - ) - - -def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: - name = getattr(obj, "name", None) - if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": - return Path(name).name - return default - - -not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") -QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} - - -def quoted_string(content: str) -> str: - """Return 7-bit content as quoted-string. - - Format content into a quoted-string as defined in RFC5322 for - Internet Message Format. Notice that this is not the 8-bit HTTP - format, but the 7-bit email format. Content must be in usascii or - a ValueError is raised. - """ - if not (QCONTENT > set(content)): - raise ValueError(f"bad content for quoted-string {content!r}") - return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) - - -def content_disposition_header( - disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str -) -> str: - """Sets ``Content-Disposition`` header for MIME. - - This is the MIME payload Content-Disposition header from RFC 2183 - and RFC 7579 section 4.2, not the HTTP Content-Disposition from - RFC 6266. - - disptype is a disposition type: inline, attachment, form-data. - Should be valid extension token (see RFC 2183) - - quote_fields performs value quoting to 7-bit MIME headers - according to RFC 7578. Set to quote_fields to False if recipient - can take 8-bit file names and field values. - - _charset specifies the charset to use when quote_fields is True. - - params is a dict with disposition params. - """ - if not disptype or not (TOKEN > set(disptype)): - raise ValueError("bad content disposition type {!r}" "".format(disptype)) - - value = disptype - if params: - lparams = [] - for key, val in params.items(): - if not key or not (TOKEN > set(key)): - raise ValueError( - "bad content disposition parameter" " {!r}={!r}".format(key, val) - ) - if quote_fields: - if key.lower() == "filename": - qval = quote(val, "", encoding=_charset) - lparams.append((key, '"%s"' % qval)) - else: - try: - qval = quoted_string(val) - except ValueError: - qval = "".join( - (_charset, "''", quote(val, "", encoding=_charset)) - ) - lparams.append((key + "*", qval)) - else: - lparams.append((key, '"%s"' % qval)) - else: - qval = val.replace("\\", "\\\\").replace('"', '\\"') - lparams.append((key, '"%s"' % qval)) - sparams = "; ".join("=".join(pair) for pair in lparams) - value = "; ".join((value, sparams)) - return value - - -class _TSelf(Protocol, Generic[_T]): - _cache: Dict[str, _T] - - -class reify(Generic[_T]): - """Use as a class method decorator. 
- - It operates almost exactly like - the Python `@property` decorator, but it puts the result of the - method it decorates into the instance dict after the first call, - effectively replacing the function it decorates with an instance - variable. It is, in Python parlance, a data descriptor. - """ - - def __init__(self, wrapped: Callable[..., _T]) -> None: - self.wrapped = wrapped - self.__doc__ = wrapped.__doc__ - self.name = wrapped.__name__ - - def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T: - try: - try: - return inst._cache[self.name] - except KeyError: - val = self.wrapped(inst) - inst._cache[self.name] = val - return val - except AttributeError: - if inst is None: - return self - raise - - def __set__(self, inst: _TSelf[_T], value: _T) -> None: - raise AttributeError("reified property is read-only") - - -reify_py = reify - -try: - from ._helpers import reify as reify_c - - if not NO_EXTENSIONS: - reify = reify_c # type: ignore[misc,assignment] -except ImportError: - pass - -_ipv4_pattern = ( - r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" -) -_ipv6_pattern = ( - r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}" - r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)" - r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})" - r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}" - r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}" - r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)" - r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}" - r":|:(:[A-F0-9]{1,4}){7})$" -) -_ipv4_regex = re.compile(_ipv4_pattern) -_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) -_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii")) -_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE) - - -def _is_ip_address( - regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]] -) -> bool: - if host is None: - return False - if isinstance(host, str): - return bool(regex.match(host)) - elif isinstance(host, (bytes, bytearray, memoryview)): - return bool(regexb.match(host)) - else: - raise TypeError(f"{host} [{type(host)}] is not a str or bytes") - - -is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb) -is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb) - - -def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool: - return is_ipv4_address(host) or is_ipv6_address(host) - - -_cached_current_datetime: Optional[int] = None -_cached_formatted_datetime = "" - - -def rfc822_formatted_time() -> str: - global _cached_current_datetime - global _cached_formatted_datetime - - now = int(time.time()) - if now != _cached_current_datetime: - # Weekday and month names for HTTP date/time formatting; - # always English! - # Tuples are constants stored in codeobject! 
- _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") - _monthname = ( - "", # Dummy so we can use 1-based month numbers - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - ) - - year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) - _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( - _weekdayname[wd], - day, - _monthname[month], - year, - hh, - mm, - ss, - ) - _cached_current_datetime = now - return _cached_formatted_datetime - - -def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: - ref, name = info - ob = ref() - if ob is not None: - with suppress(Exception): - getattr(ob, name)() - - -def weakref_handle( - ob: object, - name: str, - timeout: float, - loop: asyncio.AbstractEventLoop, - timeout_ceil_threshold: float = 5, -) -> Optional[asyncio.TimerHandle]: - if timeout is not None and timeout > 0: - when = loop.time() + timeout - if timeout >= timeout_ceil_threshold: - when = ceil(when) - - return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) - return None - - -def call_later( - cb: Callable[[], Any], - timeout: float, - loop: asyncio.AbstractEventLoop, - timeout_ceil_threshold: float = 5, -) -> Optional[asyncio.TimerHandle]: - if timeout is not None and timeout > 0: - when = loop.time() + timeout - if timeout > timeout_ceil_threshold: - when = ceil(when) - return loop.call_at(when, cb) - return None - - -class TimeoutHandle: - """Timeout handle""" - - def __init__( - self, - loop: asyncio.AbstractEventLoop, - timeout: Optional[float], - ceil_threshold: float = 5, - ) -> None: - self._timeout = timeout - self._loop = loop - self._ceil_threshold = ceil_threshold - self._callbacks: List[ - Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] - ] = [] - - def register( - self, callback: Callable[..., None], *args: Any, **kwargs: Any - ) -> None: - self._callbacks.append((callback, args, kwargs)) - - def close(self) -> None: - self._callbacks.clear() - - def start(self) -> Optional[asyncio.Handle]: - timeout = self._timeout - if timeout is not None and timeout > 0: - when = self._loop.time() + timeout - if timeout >= self._ceil_threshold: - when = ceil(when) - return self._loop.call_at(when, self.__call__) - else: - return None - - def timer(self) -> "BaseTimerContext": - if self._timeout is not None and self._timeout > 0: - timer = TimerContext(self._loop) - self.register(timer.timeout) - return timer - else: - return TimerNoop() - - def __call__(self) -> None: - for cb, args, kwargs in self._callbacks: - with suppress(Exception): - cb(*args, **kwargs) - - self._callbacks.clear() - - -class BaseTimerContext(ContextManager["BaseTimerContext"]): - def assert_timeout(self) -> None: - """Raise TimeoutError if timeout has been exceeded.""" - - -class TimerNoop(BaseTimerContext): - def __enter__(self) -> BaseTimerContext: - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - return - - -class TimerContext(BaseTimerContext): - """Low resolution timeout context manager""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._tasks: List[asyncio.Task[Any]] = [] - self._cancelled = False - - def assert_timeout(self) -> None: - """Raise TimeoutError if timer has already been cancelled.""" - if self._cancelled: - raise asyncio.TimeoutError from None - - def __enter__(self) -> BaseTimerContext: - task = 
current_task(loop=self._loop) - - if task is None: - raise RuntimeError( - "Timeout context manager should be used " "inside a task" - ) - - if self._cancelled: - raise asyncio.TimeoutError from None - - self._tasks.append(task) - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: - if self._tasks: - self._tasks.pop() - - if exc_type is asyncio.CancelledError and self._cancelled: - raise asyncio.TimeoutError from None - return None - - def timeout(self) -> None: - if not self._cancelled: - for task in set(self._tasks): - task.cancel() - - self._cancelled = True - - -def ceil_timeout( - delay: Optional[float], ceil_threshold: float = 5 -) -> async_timeout.Timeout: - if delay is None or delay <= 0: - return async_timeout.timeout(None) - - loop = get_running_loop() - now = loop.time() - when = now + delay - if delay > ceil_threshold: - when = ceil(when) - return async_timeout.timeout_at(when) - - -class HeadersMixin: - ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) - - _headers: MultiMapping[str] - - _content_type: Optional[str] = None - _content_dict: Optional[Dict[str, str]] = None - _stored_content_type: Union[str, None, _SENTINEL] = sentinel - - def _parse_content_type(self, raw: Optional[str]) -> None: - self._stored_content_type = raw - if raw is None: - # default value according to RFC 2616 - self._content_type = "application/octet-stream" - self._content_dict = {} - else: - msg = HeaderParser().parsestr("Content-Type: " + raw) - self._content_type = msg.get_content_type() - params = msg.get_params(()) - self._content_dict = dict(params[1:]) # First element is content type again - - @property - def content_type(self) -> str: - """The value of content part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) - if self._stored_content_type != raw: - self._parse_content_type(raw) - return self._content_type # type: ignore[return-value] - - @property - def charset(self) -> Optional[str]: - """The value of charset part for Content-Type HTTP header.""" - raw = self._headers.get(hdrs.CONTENT_TYPE) - if self._stored_content_type != raw: - self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore[union-attr] - - @property - def content_length(self) -> Optional[int]: - """The value of Content-Length HTTP header.""" - content_length = self._headers.get(hdrs.CONTENT_LENGTH) - - if content_length is not None: - return int(content_length) - else: - return None - - -def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: - if not fut.done(): - fut.set_result(result) - - -def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: - if not fut.done(): - fut.set_exception(exc) - - -@functools.total_ordering -class AppKey(Generic[_T]): - """Keys for static typing support in Application.""" - - __slots__ = ("_name", "_t", "__orig_class__") - - # This may be set by Python when instantiating with a generic type. We need to - # support this, in order to support types that are not concrete classes, - # like Iterable, which can't be passed as the second parameter to __init__. - __orig_class__: Type[object] - - def __init__(self, name: str, t: Optional[Type[_T]] = None): - # Prefix with module name to help deduplicate key names. 
- frame = inspect.currentframe() - while frame: - if frame.f_code.co_name == "<module>": - module: str = frame.f_globals["__name__"] - break - frame = frame.f_back - - self._name = module + "." + name - self._t = t - - def __lt__(self, other: object) -> bool: - if isinstance(other, AppKey): - return self._name < other._name - return True # Order AppKey above other types. - - def __repr__(self) -> str: - t = self._t - if t is None: - with suppress(AttributeError): - # Set to type arg. - t = get_args(self.__orig_class__)[0] - - if t is None: - t_repr = "<<Unknown>>" - elif isinstance(t, type): - if t.__module__ == "builtins": - t_repr = t.__qualname__ - else: - t_repr = f"{t.__module__}.{t.__qualname__}" - else: - t_repr = repr(t) - return f"<AppKey({self._name}, type={t_repr})>" - - -class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]): - __slots__ = ("_maps",) - - def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None: - self._maps = tuple(maps) - - def __init_subclass__(cls) -> None: - raise TypeError( - "Inheritance class {} from ChainMapProxy " - "is forbidden".format(cls.__name__) - ) - - @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: - ... - - @overload - def __getitem__(self, key: str) -> Any: - ... - - def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: - for mapping in self._maps: - try: - return mapping[key] - except KeyError: - pass - raise KeyError(key) - - @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: - ... - - @overload - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: - ... - - @overload - def get(self, key: str, default: Any = ...) -> Any: - ... - - def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: - try: - return self[key] - except KeyError: - return default - - def __len__(self) -> int: - # reuses stored hash values if possible - return len(set().union(*self._maps)) - - def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: - d: Dict[Union[str, AppKey[Any]], Any] = {} - for mapping in reversed(self._maps): - # reuses stored hash values if possible - d.update(mapping) - return iter(d) - - def __contains__(self, key: object) -> bool: - return any(key in m for m in self._maps) - - def __bool__(self) -> bool: - return any(self._maps) - - def __repr__(self) -> str: - content = ", ".join(map(repr, self._maps)) - return f"ChainMapProxy({content})" - - -# https://tools.ietf.org/html/rfc7232#section-2.3 -_ETAGC = r"[!\x23-\x7E\x80-\xff]+" -_ETAGC_RE = re.compile(_ETAGC) -_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' -QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) -LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") - -ETAG_ANY = "*" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class ETag: - value: str - is_weak: bool = False - - -def validate_etag_value(value: str) -> None: - if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): - raise ValueError( - f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
- ) - - -def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: - """Process a date string, return a datetime object""" - if date_str is not None: - timetuple = parsedate(date_str) - if timetuple is not None: - with suppress(ValueError): - return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) - return None - - -def must_be_empty_body(method: str, code: int) -> bool: - """Check if a request must return an empty body.""" - return ( - status_code_must_be_empty_body(code) - or method_must_be_empty_body(method) - or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) - ) - - -def method_must_be_empty_body(method: str) -> bool: - """Check if a method must return an empty body.""" - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 - return method.upper() == hdrs.METH_HEAD - - -def status_code_must_be_empty_body(code: int) -> bool: - """Check if a status code must return an empty body.""" - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 - return code in {204, 304} or 100 <= code < 200 - - -def should_remove_content_length(method: str, code: int) -> bool: - """Check if a Content-Length header should be removed. - - This should always be a subset of must_be_empty_body - """ - # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8 - # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4 - return ( - code in {204, 304} - or 100 <= code < 200 - or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT) - ) diff --git a/.venv/Lib/site-packages/aiohttp/http.py b/.venv/Lib/site-packages/aiohttp/http.py deleted file mode 100644 index a1feae2..0000000 --- a/.venv/Lib/site-packages/aiohttp/http.py +++ /dev/null @@ -1,72 +0,0 @@ -import sys -from http import HTTPStatus -from typing import Mapping, Tuple - -from . 
import __version__ -from .http_exceptions import HttpProcessingError as HttpProcessingError -from .http_parser import ( - HeadersParser as HeadersParser, - HttpParser as HttpParser, - HttpRequestParser as HttpRequestParser, - HttpResponseParser as HttpResponseParser, - RawRequestMessage as RawRequestMessage, - RawResponseMessage as RawResponseMessage, -) -from .http_websocket import ( - WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, - WS_KEY as WS_KEY, - WebSocketError as WebSocketError, - WebSocketReader as WebSocketReader, - WebSocketWriter as WebSocketWriter, - WSCloseCode as WSCloseCode, - WSMessage as WSMessage, - WSMsgType as WSMsgType, - ws_ext_gen as ws_ext_gen, - ws_ext_parse as ws_ext_parse, -) -from .http_writer import ( - HttpVersion as HttpVersion, - HttpVersion10 as HttpVersion10, - HttpVersion11 as HttpVersion11, - StreamWriter as StreamWriter, -) - -__all__ = ( - "HttpProcessingError", - "RESPONSES", - "SERVER_SOFTWARE", - # .http_writer - "StreamWriter", - "HttpVersion", - "HttpVersion10", - "HttpVersion11", - # .http_parser - "HeadersParser", - "HttpParser", - "HttpRequestParser", - "HttpResponseParser", - "RawRequestMessage", - "RawResponseMessage", - # .http_websocket - "WS_CLOSED_MESSAGE", - "WS_CLOSING_MESSAGE", - "WS_KEY", - "WebSocketReader", - "WebSocketWriter", - "ws_ext_gen", - "ws_ext_parse", - "WSMessage", - "WebSocketError", - "WSMsgType", - "WSCloseCode", -) - - -SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( - sys.version_info, __version__ -) - -RESPONSES: Mapping[int, Tuple[str, str]] = { - v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() -} diff --git a/.venv/Lib/site-packages/aiohttp/http_exceptions.py b/.venv/Lib/site-packages/aiohttp/http_exceptions.py deleted file mode 100644 index 72eac3a..0000000 --- a/.venv/Lib/site-packages/aiohttp/http_exceptions.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Low-level http related exceptions.""" - - -from textwrap import indent -from typing import Optional, Union - -from .typedefs import _CIMultiDict - -__all__ = ("HttpProcessingError",) - - -class HttpProcessingError(Exception): - """HTTP error. - - Shortcut for raising HTTP errors with custom code, message and headers. - - code: HTTP Error code. - message: (optional) Error message. 
- headers: (optional) Headers to be sent in response, a list of pairs - """ - - code = 0 - message = "" - headers = None - - def __init__( - self, - *, - code: Optional[int] = None, - message: str = "", - headers: Optional[_CIMultiDict] = None, - ) -> None: - if code is not None: - self.code = code - self.headers = headers - self.message = message - - def __str__(self) -> str: - msg = indent(self.message, " ") - return f"{self.code}, message:\n{msg}" - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>" - - -class BadHttpMessage(HttpProcessingError): - - code = 400 - message = "Bad Request" - - def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: - super().__init__(message=message, headers=headers) - self.args = (message,) - - -class HttpBadRequest(BadHttpMessage): - - code = 400 - message = "Bad Request" - - -class PayloadEncodingError(BadHttpMessage): - """Base class for payload errors""" - - -class ContentEncodingError(PayloadEncodingError): - """Content encoding error.""" - - -class TransferEncodingError(PayloadEncodingError): - """transfer encoding error.""" - - -class ContentLengthError(PayloadEncodingError): - """Not enough data for satisfy content length header.""" - - -class LineTooLong(BadHttpMessage): - def __init__( - self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" - ) -> None: - super().__init__( - f"Got more than {limit} bytes ({actual_size}) when reading {line}." - ) - self.args = (line, limit, actual_size) - - -class InvalidHeader(BadHttpMessage): - def __init__(self, hdr: Union[bytes, str]) -> None: - hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr - super().__init__(f"Invalid HTTP header: {hdr!r}") - self.hdr = hdr_s - self.args = (hdr,) - - -class BadStatusLine(BadHttpMessage): - def __init__(self, line: str = "", error: Optional[str] = None) -> None: - if not isinstance(line, str): - line = repr(line) - super().__init__(error or f"Bad status line {line!r}") - self.args = (line,) - self.line = line - - -class InvalidURLError(BadHttpMessage): - pass diff --git a/.venv/Lib/site-packages/aiohttp/http_parser.py b/.venv/Lib/site-packages/aiohttp/http_parser.py deleted file mode 100644 index 1877f55..0000000 --- a/.venv/Lib/site-packages/aiohttp/http_parser.py +++ /dev/null @@ -1,1013 +0,0 @@ -import abc -import asyncio -import re -import string -from contextlib import suppress -from enum import IntEnum -from typing import ( - Any, - ClassVar, - Final, - Generic, - List, - Literal, - NamedTuple, - Optional, - Pattern, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -from multidict import CIMultiDict, CIMultiDictProxy, istr -from yarl import URL - -from . 
import hdrs -from .base_protocol import BaseProtocol -from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor -from .helpers import ( - DEBUG, - NO_EXTENSIONS, - BaseTimerContext, - method_must_be_empty_body, - status_code_must_be_empty_body, -) -from .http_exceptions import ( - BadHttpMessage, - BadStatusLine, - ContentEncodingError, - ContentLengthError, - InvalidHeader, - InvalidURLError, - LineTooLong, - TransferEncodingError, -) -from .http_writer import HttpVersion, HttpVersion10 -from .log import internal_logger -from .streams import EMPTY_PAYLOAD, StreamReader -from .typedefs import RawHeaders - -__all__ = ( - "HeadersParser", - "HttpParser", - "HttpRequestParser", - "HttpResponseParser", - "RawRequestMessage", - "RawResponseMessage", -) - -_SEP = Literal[b"\r\n", b"\n"] - -ASCIISET: Final[Set[str]] = set(string.printable) - -# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview -# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens -# -# method = token -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / -# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA -# token = 1*tchar -_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~") -TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+") -VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII) -DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII) -HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+") - - -class RawRequestMessage(NamedTuple): - method: str - path: str - version: HttpVersion - headers: "CIMultiDictProxy[str]" - raw_headers: RawHeaders - should_close: bool - compression: Optional[str] - upgrade: bool - chunked: bool - url: URL - - -class RawResponseMessage(NamedTuple): - version: HttpVersion - code: int - reason: str - headers: CIMultiDictProxy[str] - raw_headers: RawHeaders - should_close: bool - compression: Optional[str] - upgrade: bool - chunked: bool - - -_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) - - -class ParseState(IntEnum): - - PARSE_NONE = 0 - PARSE_LENGTH = 1 - PARSE_CHUNKED = 2 - PARSE_UNTIL_EOF = 3 - - -class ChunkState(IntEnum): - PARSE_CHUNKED_SIZE = 0 - PARSE_CHUNKED_CHUNK = 1 - PARSE_CHUNKED_CHUNK_EOF = 2 - PARSE_MAYBE_TRAILERS = 3 - PARSE_TRAILERS = 4 - - -class HeadersParser: - def __init__( - self, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - ) -> None: - self.max_line_size = max_line_size - self.max_headers = max_headers - self.max_field_size = max_field_size - - def parse_headers( - self, lines: List[bytes] - ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: - headers: CIMultiDict[str] = CIMultiDict() - # note: "raw" does not mean inclusion of OWS before/after the field value - raw_headers = [] - - lines_idx = 1 - line = lines[1] - line_count = len(lines) - - while line: - # Parse initial header name : value pair. 
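# Illustrative sketch (not part of the deleted file): the loop below splits each
# header line on the first ":" and validates the field name against TOKENRE,
# the RFC 9110 "token" grammar, rejecting empty names and names with leading or
# trailing SP/HTAB (RFC 9112 section 5.1). A minimal standalone version of that
# check, stdlib only; `is_valid_field_name` is a hypothetical name, not aiohttp API.
import re

_TCHAR = re.escape("!#$%&'*+-.^_`|~")
_TOKEN_RE = re.compile(f"[0-9A-Za-z{_TCHAR}]+")

def is_valid_field_name(raw_line: bytes) -> bool:
    """Return True if the header line carries a syntactically valid field name."""
    try:
        name, _value = raw_line.split(b":", 1)
    except ValueError:
        return False  # no colon at all: malformed header line
    # Leading/trailing SP or HTAB around the name is forbidden.
    if not name or name[:1] in (b" ", b"\t") or name[-1:] in (b" ", b"\t"):
        return False
    return _TOKEN_RE.fullmatch(name.decode("utf-8", "surrogateescape")) is not None

assert is_valid_field_name(b"Content-Type: text/plain")
assert not is_valid_field_name(b"Bad Header : x")  # space before the colon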
- try: - bname, bvalue = line.split(b":", 1) - except ValueError: - raise InvalidHeader(line) from None - - if len(bname) == 0: - raise InvalidHeader(bname) - - # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 - if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} - raise InvalidHeader(line) - - bvalue = bvalue.lstrip(b" \t") - if len(bname) > self.max_field_size: - raise LineTooLong( - "request header name {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(len(bname)), - ) - name = bname.decode("utf-8", "surrogateescape") - if not TOKENRE.fullmatch(name): - raise InvalidHeader(bname) - - header_length = len(bvalue) - - # next line - lines_idx += 1 - line = lines[lines_idx] - - # consume continuation lines - continuation = line and line[0] in (32, 9) # (' ', '\t') - - # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding - if continuation: - bvalue_lst = [bvalue] - while continuation: - header_length += len(line) - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(header_length), - ) - bvalue_lst.append(line) - - # next line - lines_idx += 1 - if lines_idx < line_count: - line = lines[lines_idx] - if line: - continuation = line[0] in (32, 9) # (' ', '\t') - else: - line = b"" - break - bvalue = b"".join(bvalue_lst) - else: - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(header_length), - ) - - bvalue = bvalue.strip(b" \t") - value = bvalue.decode("utf-8", "surrogateescape") - - # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 - if "\n" in value or "\r" in value or "\x00" in value: - raise InvalidHeader(bvalue) - - headers.add(name, value) - raw_headers.append((bname, bvalue)) - - return (CIMultiDictProxy(headers), tuple(raw_headers)) - - -class HttpParser(abc.ABC, Generic[_MsgT]): - lax: ClassVar[bool] = False - - def __init__( - self, - protocol: Optional[BaseProtocol] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - limit: int = 2**16, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - timer: Optional[BaseTimerContext] = None, - code: Optional[int] = None, - method: Optional[str] = None, - readall: bool = False, - payload_exception: Optional[Type[BaseException]] = None, - response_with_body: bool = True, - read_until_eof: bool = False, - auto_decompress: bool = True, - ) -> None: - self.protocol = protocol - self.loop = loop - self.max_line_size = max_line_size - self.max_headers = max_headers - self.max_field_size = max_field_size - self.timer = timer - self.code = code - self.method = method - self.readall = readall - self.payload_exception = payload_exception - self.response_with_body = response_with_body - self.read_until_eof = read_until_eof - - self._lines: List[bytes] = [] - self._tail = b"" - self._upgraded = False - self._payload = None - self._payload_parser: Optional[HttpPayloadParser] = None - self._auto_decompress = auto_decompress - self._limit = limit - self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) - - @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: - pass - - def feed_eof(self) -> Optional[_MsgT]: - if self._payload_parser is not None: - self._payload_parser.feed_eof() - self._payload_parser = None - else: - # 
try to extract partial message - if self._tail: - self._lines.append(self._tail) - - if self._lines: - if self._lines[-1] != "\r\n": - self._lines.append(b"") - with suppress(Exception): - return self.parse_message(self._lines) - return None - - def feed_data( - self, - data: bytes, - SEP: _SEP = b"\r\n", - EMPTY: bytes = b"", - CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, - METH_CONNECT: str = hdrs.METH_CONNECT, - SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, - ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: - - messages = [] - - if self._tail: - data, self._tail = self._tail + data, b"" - - data_len = len(data) - start_pos = 0 - loop = self.loop - - while start_pos < data_len: - - # read HTTP message (request/response line + headers), \r\n\r\n - # and split by lines - if self._payload_parser is None and not self._upgraded: - pos = data.find(SEP, start_pos) - # consume \r\n - if pos == start_pos and not self._lines: - start_pos = pos + len(SEP) - continue - - if pos >= start_pos: - # line found - line = data[start_pos:pos] - if SEP == b"\n": # For lax response parsing - line = line.rstrip(b"\r") - self._lines.append(line) - start_pos = pos + len(SEP) - - # \r\n\r\n found - if self._lines[-1] == EMPTY: - try: - msg: _MsgT = self.parse_message(self._lines) - finally: - self._lines.clear() - - def get_content_length() -> Optional[int]: - # payload length - length_hdr = msg.headers.get(CONTENT_LENGTH) - if length_hdr is None: - return None - - # Shouldn't allow +/- or other number formats. - # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 - # msg.headers is already stripped of leading/trailing wsp - if not DIGITS.fullmatch(length_hdr): - raise InvalidHeader(CONTENT_LENGTH) - - return int(length_hdr) - - length = get_content_length() - # do not support old websocket spec - if SEC_WEBSOCKET_KEY1 in msg.headers: - raise InvalidHeader(SEC_WEBSOCKET_KEY1) - - self._upgraded = msg.upgrade - - method = getattr(msg, "method", self.method) - # code is only present on responses - code = getattr(msg, "code", 0) - - assert self.protocol is not None - # calculate payload - empty_body = status_code_must_be_empty_body(code) or bool( - method and method_must_be_empty_body(method) - ) - if not empty_body and ( - (length is not None and length > 0) - or msg.chunked - and not msg.upgrade - ): - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - payload_parser = HttpPayloadParser( - payload, - length=length, - chunked=msg.chunked, - method=method, - compression=msg.compression, - code=self.code, - readall=self.readall, - response_with_body=self.response_with_body, - auto_decompress=self._auto_decompress, - lax=self.lax, - ) - if not payload_parser.done: - self._payload_parser = payload_parser - elif method == METH_CONNECT: - assert isinstance(msg, RawRequestMessage) - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - self._upgraded = True - self._payload_parser = HttpPayloadParser( - payload, - method=msg.method, - compression=msg.compression, - readall=True, - auto_decompress=self._auto_decompress, - lax=self.lax, - ) - elif not empty_body and length is None and self.read_until_eof: - payload = StreamReader( - self.protocol, - timer=self.timer, - loop=loop, - limit=self._limit, - ) - payload_parser = HttpPayloadParser( - payload, - length=length, - chunked=msg.chunked, - method=method, - compression=msg.compression, - code=self.code, - readall=True, - 
response_with_body=self.response_with_body, - auto_decompress=self._auto_decompress, - lax=self.lax, - ) - if not payload_parser.done: - self._payload_parser = payload_parser - else: - payload = EMPTY_PAYLOAD - - messages.append((msg, payload)) - else: - self._tail = data[start_pos:] - data = EMPTY - break - - # no parser, just store - elif self._payload_parser is None and self._upgraded: - assert not self._lines - break - - # feed payload - elif data and start_pos < data_len: - assert not self._lines - assert self._payload_parser is not None - try: - eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) - except BaseException as exc: - if self.payload_exception is not None: - self._payload_parser.payload.set_exception( - self.payload_exception(str(exc)) - ) - else: - self._payload_parser.payload.set_exception(exc) - - eof = True - data = b"" - - if eof: - start_pos = 0 - data_len = len(data) - self._payload_parser = None - continue - else: - break - - if data and start_pos < data_len: - data = data[start_pos:] - else: - data = EMPTY - - return messages, self._upgraded, data - - def parse_headers( - self, lines: List[bytes] - ) -> Tuple[ - "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool - ]: - """Parses RFC 5322 headers from a stream. - - Line continuations are supported. Returns list of header name - and value pairs. Header name is in upper case. - """ - headers, raw_headers = self._headers_parser.parse_headers(lines) - close_conn = None - encoding = None - upgrade = False - chunked = False - - # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6 - # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf - singletons = ( - hdrs.CONTENT_LENGTH, - hdrs.CONTENT_LOCATION, - hdrs.CONTENT_RANGE, - hdrs.CONTENT_TYPE, - hdrs.ETAG, - hdrs.HOST, - hdrs.MAX_FORWARDS, - hdrs.SERVER, - hdrs.TRANSFER_ENCODING, - hdrs.USER_AGENT, - ) - bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None) - if bad_hdr is not None: - raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.") - - # keep-alive - conn = headers.get(hdrs.CONNECTION) - if conn: - v = conn.lower() - if v == "close": - close_conn = True - elif v == "keep-alive": - close_conn = False - # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols - elif v == "upgrade" and headers.get(hdrs.UPGRADE): - upgrade = True - - # encoding - enc = headers.get(hdrs.CONTENT_ENCODING) - if enc: - enc = enc.lower() - if enc in ("gzip", "deflate", "br"): - encoding = enc - - # chunking - te = headers.get(hdrs.TRANSFER_ENCODING) - if te is not None: - if "chunked" == te.lower(): - chunked = True - else: - raise BadHttpMessage("Request has invalid `Transfer-Encoding`") - - if hdrs.CONTENT_LENGTH in headers: - raise BadHttpMessage( - "Transfer-Encoding can't be present with Content-Length", - ) - - return (headers, raw_headers, close_conn, encoding, upgrade, chunked) - - def set_upgraded(self, val: bool) -> None: - """Set connection upgraded (to websocket) mode. - - :param bool val: new state. - """ - self._upgraded = val - - -class HttpRequestParser(HttpParser[RawRequestMessage]): - """Read request status line. - - Exception .http_exceptions.BadStatusLine - could be raised in case of any errors in status line. - Returns RawRequestMessage. 
- """ - - def parse_message(self, lines: List[bytes]) -> RawRequestMessage: - # request line - line = lines[0].decode("utf-8", "surrogateescape") - try: - method, path, version = line.split(" ", maxsplit=2) - except ValueError: - raise BadStatusLine(line) from None - - if len(path) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(path)) - ) - - # method - if not TOKENRE.fullmatch(method): - raise BadStatusLine(method) - - # version - match = VERSRE.fullmatch(version) - if match is None: - raise BadStatusLine(line) - version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - - if method == "CONNECT": - # authority-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 - url = URL.build(authority=path, encoded=True) - elif path.startswith("/"): - # origin-form, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 - path_part, _hash_separator, url_fragment = path.partition("#") - path_part, _question_mark_separator, qs_part = path_part.partition("?") - - # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based - # NOTE: parser does, otherwise it results into the same - # NOTE: HTTP Request-Line input producing different - # NOTE: `yarl.URL()` objects - url = URL.build( - path=path_part, - query_string=qs_part, - fragment=url_fragment, - encoded=True, - ) - elif path == "*" and method == "OPTIONS": - # asterisk-form, - url = URL(path, encoded=True) - else: - # absolute-form for proxy maybe, - # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 - url = URL(path, encoded=True) - if url.scheme == "": - # not absolute-form - raise InvalidURLError( - path.encode(errors="surrogateescape").decode("latin1") - ) - - # read headers - ( - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) = self.parse_headers(lines) - - if close is None: # then the headers weren't set in the request - if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close - close = True - else: # HTTP 1.1 must ask to close. - close = False - - return RawRequestMessage( - method, - path, - version_o, - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - url, - ) - - -class HttpResponseParser(HttpParser[RawResponseMessage]): - """Read response status line and headers. - - BadStatusLine could be raised in case of any errors in status line. - Returns RawResponseMessage. - """ - - # Lax mode should only be enabled on response parser. 
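# Illustrative sketch (not part of the deleted file): parse_message() above falls
# back to a per-version default when no Connection header was seen -- HTTP/1.0
# closes by default, HTTP/1.1 keeps the connection alive -- while an explicit
# "close"/"keep-alive" value wins. A tiny standalone helper capturing that rule;
# `should_close` is a hypothetical name, not aiohttp API.
from typing import Optional, Tuple

def should_close(version: Tuple[int, int], connection: Optional[str]) -> bool:
    """Decide whether the connection must be closed after this message."""
    if connection is not None:
        value = connection.lower()
        if value == "close":
            return True
        if value == "keep-alive":
            return False
    # No (or unrecognised) Connection header: fall back to the version default.
    return version <= (1, 0)

assert should_close((1, 0), None) is True      # HTTP/1.0 defaults to close
assert should_close((1, 1), None) is False     # HTTP/1.1 defaults to keep-alive
assert should_close((1, 1), "close") is True   # explicit "Connection: close"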
- lax = not DEBUG - - def feed_data( - self, - data: bytes, - SEP: Optional[_SEP] = None, - *args: Any, - **kwargs: Any, - ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]: - if SEP is None: - SEP = b"\r\n" if DEBUG else b"\n" - return super().feed_data(data, SEP, *args, **kwargs) - - def parse_message(self, lines: List[bytes]) -> RawResponseMessage: - line = lines[0].decode("utf-8", "surrogateescape") - try: - version, status = line.split(maxsplit=1) - except ValueError: - raise BadStatusLine(line) from None - - try: - status, reason = status.split(maxsplit=1) - except ValueError: - status = status.strip() - reason = "" - - if len(reason) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(reason)) - ) - - # version - match = VERSRE.fullmatch(version) - if match is None: - raise BadStatusLine(line) - version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - - # The status code is a three-digit ASCII number, no padding - if len(status) != 3 or not DIGITS.fullmatch(status): - raise BadStatusLine(line) - status_i = int(status) - - # read headers - ( - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) = self.parse_headers(lines) - - if close is None: - close = version_o <= HttpVersion10 - - return RawResponseMessage( - version_o, - status_i, - reason.strip(), - headers, - raw_headers, - close, - compression, - upgrade, - chunked, - ) - - -class HttpPayloadParser: - def __init__( - self, - payload: StreamReader, - length: Optional[int] = None, - chunked: bool = False, - compression: Optional[str] = None, - code: Optional[int] = None, - method: Optional[str] = None, - readall: bool = False, - response_with_body: bool = True, - auto_decompress: bool = True, - lax: bool = False, - ) -> None: - self._length = 0 - self._type = ParseState.PARSE_NONE - self._chunk = ChunkState.PARSE_CHUNKED_SIZE - self._chunk_size = 0 - self._chunk_tail = b"" - self._auto_decompress = auto_decompress - self._lax = lax - self.done = False - - # payload decompression wrapper - if response_with_body and compression and self._auto_decompress: - real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( - payload, compression - ) - else: - real_payload = payload - - # payload parser - if not response_with_body: - # don't parse payload if it's not expected to be received - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True - - elif chunked: - self._type = ParseState.PARSE_CHUNKED - elif length is not None: - self._type = ParseState.PARSE_LENGTH - self._length = length - if self._length == 0: - real_payload.feed_eof() - self.done = True - else: - if readall and code != 204: - self._type = ParseState.PARSE_UNTIL_EOF - elif method in ("PUT", "POST"): - internal_logger.warning( # pragma: no cover - "Content-Length or Transfer-Encoding header is required" - ) - self._type = ParseState.PARSE_NONE - real_payload.feed_eof() - self.done = True - - self.payload = real_payload - - def feed_eof(self) -> None: - if self._type == ParseState.PARSE_UNTIL_EOF: - self.payload.feed_eof() - elif self._type == ParseState.PARSE_LENGTH: - raise ContentLengthError( - "Not enough data for satisfy content length header." - ) - elif self._type == ParseState.PARSE_CHUNKED: - raise TransferEncodingError( - "Not enough data for satisfy transfer length header." 
- ) - - def feed_data( - self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";" - ) -> Tuple[bool, bytes]: - # Read specified amount of bytes - if self._type == ParseState.PARSE_LENGTH: - required = self._length - chunk_len = len(chunk) - - if required >= chunk_len: - self._length = required - chunk_len - self.payload.feed_data(chunk, chunk_len) - if self._length == 0: - self.payload.feed_eof() - return True, b"" - else: - self._length = 0 - self.payload.feed_data(chunk[:required], required) - self.payload.feed_eof() - return True, chunk[required:] - - # Chunked transfer encoding parser - elif self._type == ParseState.PARSE_CHUNKED: - if self._chunk_tail: - chunk = self._chunk_tail + chunk - self._chunk_tail = b"" - - while chunk: - - # read next chunk size - if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: - pos = chunk.find(SEP) - if pos >= 0: - i = chunk.find(CHUNK_EXT, 0, pos) - if i >= 0: - size_b = chunk[:i] # strip chunk-extensions - else: - size_b = chunk[:pos] - - if self._lax: # Allow whitespace in lax mode. - size_b = size_b.strip() - - if not re.fullmatch(HEXDIGITS, size_b): - exc = TransferEncodingError( - chunk[:pos].decode("ascii", "surrogateescape") - ) - self.payload.set_exception(exc) - raise exc - size = int(bytes(size_b), 16) - - chunk = chunk[pos + len(SEP) :] - if size == 0: # eof marker - self._chunk = ChunkState.PARSE_MAYBE_TRAILERS - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] - else: - self._chunk = ChunkState.PARSE_CHUNKED_CHUNK - self._chunk_size = size - self.payload.begin_http_chunk_receiving() - else: - self._chunk_tail = chunk - return False, b"" - - # read chunk and feed buffer - if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: - required = self._chunk_size - chunk_len = len(chunk) - - if required > chunk_len: - self._chunk_size = required - chunk_len - self.payload.feed_data(chunk, chunk_len) - return False, b"" - else: - self._chunk_size = 0 - self.payload.feed_data(chunk[:required], required) - chunk = chunk[required:] - if self._lax and chunk.startswith(b"\r"): - chunk = chunk[1:] - self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF - self.payload.end_http_chunk_receiving() - - # toss the CRLF at the end of the chunk - if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: - if chunk[: len(SEP)] == SEP: - chunk = chunk[len(SEP) :] - self._chunk = ChunkState.PARSE_CHUNKED_SIZE - else: - self._chunk_tail = chunk - return False, b"" - - # if stream does not contain trailer, after 0\r\n - # we should get another \r\n otherwise - # trailers needs to be skipped until \r\n\r\n - if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: - head = chunk[: len(SEP)] - if head == SEP: - # end of stream - self.payload.feed_eof() - return True, chunk[len(SEP) :] - # Both CR and LF, or only LF may not be received yet. It is - # expected that CRLF or LF will be shown at the very first - # byte next time, otherwise trailers should come. The last - # CRLF which marks the end of response might not be - # contained in the same TCP segment which delivered the - # size indicator. 
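# Illustrative sketch (not part of the deleted file): the state machine around
# this point decodes the chunked transfer coding incrementally -- hex size line
# (with optional ";ext" chunk extensions), chunk data, terminating CRLF, then a
# zero-size last-chunk followed by optional trailers. For reference, the same
# wire format decoded eagerly in one pass; `decode_chunked` is a hypothetical
# helper, not aiohttp API, and it ignores extensions and trailers.
def decode_chunked(body: bytes) -> bytes:
    """Decode a complete chunked-encoded body."""
    out = bytearray()
    pos = 0
    while True:
        eol = body.index(b"\r\n", pos)
        size_line = body[pos:eol]
        # chunk-size may be followed by ";name=value" chunk extensions.
        size = int(size_line.split(b";", 1)[0], 16)
        pos = eol + 2
        if size == 0:
            break  # last-chunk; trailers (if any) follow until a blank line
        out += body[pos:pos + size]
        pos += size + 2  # skip the CRLF that terminates the chunk data
    return bytes(out)

wire = b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"
assert decode_chunked(wire) == b"Wikipedia"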
- if not head: - return False, b"" - if head == SEP[:1]: - self._chunk_tail = head - return False, b"" - self._chunk = ChunkState.PARSE_TRAILERS - - # read and discard trailer up to the CRLF terminator - if self._chunk == ChunkState.PARSE_TRAILERS: - pos = chunk.find(SEP) - if pos >= 0: - chunk = chunk[pos + len(SEP) :] - self._chunk = ChunkState.PARSE_MAYBE_TRAILERS - else: - self._chunk_tail = chunk - return False, b"" - - # Read all bytes until eof - elif self._type == ParseState.PARSE_UNTIL_EOF: - self.payload.feed_data(chunk, len(chunk)) - - return False, b"" - - -class DeflateBuffer: - """DeflateStream decompress stream and feed data into specified stream.""" - - decompressor: Any - - def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: - self.out = out - self.size = 0 - self.encoding = encoding - self._started_decoding = False - - self.decompressor: Union[BrotliDecompressor, ZLibDecompressor] - if encoding == "br": - if not HAS_BROTLI: # pragma: no cover - raise ContentEncodingError( - "Can not decode content-encoding: brotli (br). " - "Please install `Brotli`" - ) - self.decompressor = BrotliDecompressor() - else: - self.decompressor = ZLibDecompressor(encoding=encoding) - - def set_exception(self, exc: BaseException) -> None: - self.out.set_exception(exc) - - def feed_data(self, chunk: bytes, size: int) -> None: - if not size: - return - - self.size += size - - # RFC1950 - # bits 0..3 = CM = 0b1000 = 8 = "deflate" - # bits 4..7 = CINFO = 1..7 = windows size. - if ( - not self._started_decoding - and self.encoding == "deflate" - and chunk[0] & 0xF != 8 - ): - # Change the decoder to decompress incorrectly compressed data - # Actually we should issue a warning about non-RFC-compliant data. - self.decompressor = ZLibDecompressor( - encoding=self.encoding, suppress_deflate_header=True - ) - - try: - chunk = self.decompressor.decompress_sync(chunk) - except Exception: - raise ContentEncodingError( - "Can not decode content-encoding: %s" % self.encoding - ) - - self._started_decoding = True - - if chunk: - self.out.feed_data(chunk, len(chunk)) - - def feed_eof(self) -> None: - chunk = self.decompressor.flush() - - if chunk or self.size > 0: - self.out.feed_data(chunk, len(chunk)) - if self.encoding == "deflate" and not self.decompressor.eof: - raise ContentEncodingError("deflate") - - self.out.feed_eof() - - def begin_http_chunk_receiving(self) -> None: - self.out.begin_http_chunk_receiving() - - def end_http_chunk_receiving(self) -> None: - self.out.end_http_chunk_receiving() - - -HttpRequestParserPy = HttpRequestParser -HttpResponseParserPy = HttpResponseParser -RawRequestMessagePy = RawRequestMessage -RawResponseMessagePy = RawResponseMessage - -try: - if not NO_EXTENSIONS: - from ._http_parser import ( # type: ignore[import-not-found,no-redef] - HttpRequestParser, - HttpResponseParser, - RawRequestMessage, - RawResponseMessage, - ) - - HttpRequestParserC = HttpRequestParser - HttpResponseParserC = HttpResponseParser - RawRequestMessageC = RawRequestMessage - RawResponseMessageC = RawResponseMessage -except ImportError: # pragma: no cover - pass diff --git a/.venv/Lib/site-packages/aiohttp/http_websocket.py b/.venv/Lib/site-packages/aiohttp/http_websocket.py deleted file mode 100644 index b63453f..0000000 --- a/.venv/Lib/site-packages/aiohttp/http_websocket.py +++ /dev/null @@ -1,740 +0,0 @@ -"""WebSocket protocol versions 13 and 8.""" - -import asyncio -import functools -import json -import random -import re -import sys -import zlib -from enum import IntEnum -from 
struct import Struct -from typing import ( - Any, - Callable, - Final, - List, - NamedTuple, - Optional, - Pattern, - Set, - Tuple, - Union, - cast, -) - -from .base_protocol import BaseProtocol -from .compression_utils import ZLibCompressor, ZLibDecompressor -from .helpers import NO_EXTENSIONS -from .streams import DataQueue - -__all__ = ( - "WS_CLOSED_MESSAGE", - "WS_CLOSING_MESSAGE", - "WS_KEY", - "WebSocketReader", - "WebSocketWriter", - "WSMessage", - "WebSocketError", - "WSMsgType", - "WSCloseCode", -) - - -class WSCloseCode(IntEnum): - OK = 1000 - GOING_AWAY = 1001 - PROTOCOL_ERROR = 1002 - UNSUPPORTED_DATA = 1003 - ABNORMAL_CLOSURE = 1006 - INVALID_TEXT = 1007 - POLICY_VIOLATION = 1008 - MESSAGE_TOO_BIG = 1009 - MANDATORY_EXTENSION = 1010 - INTERNAL_ERROR = 1011 - SERVICE_RESTART = 1012 - TRY_AGAIN_LATER = 1013 - BAD_GATEWAY = 1014 - - -ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} - -# For websockets, keeping latency low is extremely important as implementations -# generally expect to be able to send and receive messages quickly. We use a -# larger chunk size than the default to reduce the number of executor calls -# since the executor is a significant source of latency and overhead when -# the chunks are small. A size of 5KiB was chosen because it is also the -# same value python-zlib-ng choose to use as the threshold to release the GIL. - -WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 - - -class WSMsgType(IntEnum): - # websocket spec types - CONTINUATION = 0x0 - TEXT = 0x1 - BINARY = 0x2 - PING = 0x9 - PONG = 0xA - CLOSE = 0x8 - - # aiohttp specific types - CLOSING = 0x100 - CLOSED = 0x101 - ERROR = 0x102 - - text = TEXT - binary = BINARY - ping = PING - pong = PONG - close = CLOSE - closing = CLOSING - closed = CLOSED - error = ERROR - - -WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - -UNPACK_LEN2 = Struct("!H").unpack_from -UNPACK_LEN3 = Struct("!Q").unpack_from -UNPACK_CLOSE_CODE = Struct("!H").unpack -PACK_LEN1 = Struct("!BB").pack -PACK_LEN2 = Struct("!BBH").pack -PACK_LEN3 = Struct("!BBQ").pack -PACK_CLOSE_CODE = Struct("!H").pack -MSG_SIZE: Final[int] = 2**14 -DEFAULT_LIMIT: Final[int] = 2**16 - - -class WSMessage(NamedTuple): - type: WSMsgType - # To type correctly, this would need some kind of tagged union for each type. - data: Any - extra: Optional[str] - - def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: - """Return parsed JSON data. - - .. versionadded:: 0.22 - """ - return loads(self.data) - - -WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) -WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) - - -class WebSocketError(Exception): - """WebSocket protocol parser error.""" - - def __init__(self, code: int, message: str) -> None: - self.code = code - super().__init__(code, message) - - def __str__(self) -> str: - return cast(str, self.args[1]) - - -class WSHandshakeError(Exception): - """WebSocket protocol handshake error.""" - - -native_byteorder: Final[str] = sys.byteorder - - -# Used by _websocket_mask_python -@functools.lru_cache -def _xor_table() -> List[bytes]: - return [bytes(a ^ b for a in range(256)) for b in range(256)] - - -def _websocket_mask_python(mask: bytes, data: bytearray) -> None: - """Websocket masking function. - - `mask` is a `bytes` object of length 4; `data` is a `bytearray` - object of any length. The contents of `data` are masked with `mask`, - as specified in section 5.3 of RFC 6455. - - Note that this function mutates the `data` argument. 
- - This pure-python implementation may be replaced by an optimized - version when available. - - """ - assert isinstance(data, bytearray), data - assert len(mask) == 4, mask - - if data: - _XOR_TABLE = _xor_table() - a, b, c, d = (_XOR_TABLE[n] for n in mask) - data[::4] = data[::4].translate(a) - data[1::4] = data[1::4].translate(b) - data[2::4] = data[2::4].translate(c) - data[3::4] = data[3::4].translate(d) - - -if NO_EXTENSIONS: # pragma: no cover - _websocket_mask = _websocket_mask_python -else: - try: - from ._websocket import _websocket_mask_cython # type: ignore[import-not-found] - - _websocket_mask = _websocket_mask_cython - except ImportError: # pragma: no cover - _websocket_mask = _websocket_mask_python - -_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) - - -_WS_EXT_RE: Final[Pattern[str]] = re.compile( - r"^(?:;\s*(?:" - r"(server_no_context_takeover)|" - r"(client_no_context_takeover)|" - r"(server_max_window_bits(?:=(\d+))?)|" - r"(client_max_window_bits(?:=(\d+))?)))*$" -) - -_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") - - -def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: - if not extstr: - return 0, False - - compress = 0 - notakeover = False - for ext in _WS_EXT_RE_SPLIT.finditer(extstr): - defext = ext.group(1) - # Return compress = 15 when get `permessage-deflate` - if not defext: - compress = 15 - break - match = _WS_EXT_RE.match(defext) - if match: - compress = 15 - if isserver: - # Server never fail to detect compress handshake. - # Server does not need to send max wbit to client - if match.group(4): - compress = int(match.group(4)) - # Group3 must match if group4 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # CONTINUE to next extension - if compress > 15 or compress < 9: - compress = 0 - continue - if match.group(1): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - else: - if match.group(6): - compress = int(match.group(6)) - # Group5 must match if group6 matches - # Compress wbit 8 does not support in zlib - # If compress level not support, - # FAIL the parse progress - if compress > 15 or compress < 9: - raise WSHandshakeError("Invalid window size") - if match.group(2): - notakeover = True - # Ignore regex group 5 & 6 for client_max_window_bits - break - # Return Fail if client side and not match - elif not isserver: - raise WSHandshakeError("Extension for deflate not supported" + ext.group(1)) - - return compress, notakeover - - -def ws_ext_gen( - compress: int = 15, isserver: bool = False, server_notakeover: bool = False -) -> str: - # client_notakeover=False not used for server - # compress wbit 8 does not support in zlib - if compress < 9 or compress > 15: - raise ValueError( - "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" - ) - enabledext = ["permessage-deflate"] - if not isserver: - enabledext.append("client_max_window_bits") - - if compress < 15: - enabledext.append("server_max_window_bits=" + str(compress)) - if server_notakeover: - enabledext.append("server_no_context_takeover") - # if client_notakeover: - # enabledext.append('client_no_context_takeover') - return "; ".join(enabledext) - - -class WSParserState(IntEnum): - READ_HEADER = 1 - READ_PAYLOAD_LENGTH = 2 - READ_PAYLOAD_MASK = 3 - READ_PAYLOAD = 4 - - -class WebSocketReader: - def __init__( - self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True - ) -> None: - self.queue = 
queue - self._max_msg_size = max_msg_size - - self._exc: Optional[BaseException] = None - self._partial = bytearray() - self._state = WSParserState.READ_HEADER - - self._opcode: Optional[int] = None - self._frame_fin = False - self._frame_opcode: Optional[int] = None - self._frame_payload = bytearray() - - self._tail = b"" - self._has_mask = False - self._frame_mask: Optional[bytes] = None - self._payload_length = 0 - self._payload_length_flag = 0 - self._compressed: Optional[bool] = None - self._decompressobj: Optional[ZLibDecompressor] = None - self._compress = compress - - def feed_eof(self) -> None: - self.queue.feed_eof() - - def feed_data(self, data: bytes) -> Tuple[bool, bytes]: - if self._exc: - return True, data - - try: - return self._feed_data(data) - except Exception as exc: - self._exc = exc - self.queue.set_exception(exc) - return True, b"" - - def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: - for fin, opcode, payload, compressed in self.parse_frame(data): - if compressed and not self._decompressobj: - self._decompressobj = ZLibDecompressor(suppress_deflate_header=True) - if opcode == WSMsgType.CLOSE: - if len(payload) >= 2: - close_code = UNPACK_CLOSE_CODE(payload[:2])[0] - if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close code: {close_code}", - ) - try: - close_message = payload[2:].decode("utf-8") - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) - elif payload: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - f"Invalid close frame: {fin} {opcode} {payload!r}", - ) - else: - msg = WSMessage(WSMsgType.CLOSE, 0, "") - - self.queue.feed_data(msg, 0) - - elif opcode == WSMsgType.PING: - self.queue.feed_data( - WSMessage(WSMsgType.PING, payload, ""), len(payload) - ) - - elif opcode == WSMsgType.PONG: - self.queue.feed_data( - WSMessage(WSMsgType.PONG, payload, ""), len(payload) - ) - - elif ( - opcode not in (WSMsgType.TEXT, WSMsgType.BINARY) - and self._opcode is None - ): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}" - ) - else: - # load text/binary - if not fin: - # got partial frame payload - if opcode != WSMsgType.CONTINUATION: - self._opcode = opcode - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - else: - # previous frame was non finished - # we should get continuation opcode - if self._partial: - if opcode != WSMsgType.CONTINUATION: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "The opcode in non-fin frame is expected " - "to be zero, got {!r}".format(opcode), - ) - - if opcode == WSMsgType.CONTINUATION: - assert self._opcode is not None - opcode = self._opcode - self._opcode = None - - self._partial.extend(payload) - if self._max_msg_size and len(self._partial) >= self._max_msg_size: - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Message size {} exceeds limit {}".format( - len(self._partial), self._max_msg_size - ), - ) - - # Decompress process must to be done after all packets - # received. 
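# Illustrative sketch (not part of the deleted file): the branch below re-appends
# the 0x00 0x00 0xFF 0xFF flush marker before inflating, because permessage-deflate
# (RFC 7692) senders emit each message as a raw DEFLATE stream with that final
# sync-flush marker stripped. A round-trip with stdlib zlib (raw deflate,
# wbits=-15) shows the idea; `ws_deflate`/`ws_inflate` are hypothetical names.
import zlib

_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])

def ws_deflate(message: bytes) -> bytes:
    comp = zlib.compressobj(wbits=-zlib.MAX_WBITS)
    data = comp.compress(message) + comp.flush(zlib.Z_SYNC_FLUSH)
    return data[:-4] if data.endswith(_TRAILING) else data  # strip the marker

def ws_inflate(payload: bytes) -> bytes:
    decomp = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
    return decomp.decompress(payload + _TRAILING)  # restore the marker first

assert ws_inflate(ws_deflate(b"hello websocket")) == b"hello websocket"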
- if compressed: - assert self._decompressobj is not None - self._partial.extend(_WS_DEFLATE_TRAILING) - payload_merged = self._decompressobj.decompress_sync( - self._partial, self._max_msg_size - ) - if self._decompressobj.unconsumed_tail: - left = len(self._decompressobj.unconsumed_tail) - raise WebSocketError( - WSCloseCode.MESSAGE_TOO_BIG, - "Decompressed message size {} exceeds limit {}".format( - self._max_msg_size + left, self._max_msg_size - ), - ) - else: - payload_merged = bytes(self._partial) - - self._partial.clear() - - if opcode == WSMsgType.TEXT: - try: - text = payload_merged.decode("utf-8") - self.queue.feed_data( - WSMessage(WSMsgType.TEXT, text, ""), len(text) - ) - except UnicodeDecodeError as exc: - raise WebSocketError( - WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message" - ) from exc - else: - self.queue.feed_data( - WSMessage(WSMsgType.BINARY, payload_merged, ""), - len(payload_merged), - ) - - return False, b"" - - def parse_frame( - self, buf: bytes - ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: - """Return the next frame from the socket.""" - frames = [] - if self._tail: - buf, self._tail = self._tail + buf, b"" - - start_pos = 0 - buf_length = len(buf) - - while True: - # read header - if self._state == WSParserState.READ_HEADER: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - first_byte, second_byte = data - - fin = (first_byte >> 7) & 1 - rsv1 = (first_byte >> 6) & 1 - rsv2 = (first_byte >> 5) & 1 - rsv3 = (first_byte >> 4) & 1 - opcode = first_byte & 0xF - - # frame-fin = %x0 ; more frames of this message follow - # / %x1 ; final frame of this message - # frame-rsv1 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv2 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # frame-rsv3 = %x0 ; - # 1 bit, MUST be 0 unless negotiated otherwise - # - # Remove rsv1 from this test for deflate development - if rsv2 or rsv3 or (rsv1 and not self._compress): - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - if opcode > 0x7 and fin == 0: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received fragmented control frame", - ) - - has_mask = (second_byte >> 7) & 1 - length = second_byte & 0x7F - - # Control frames MUST have a payload - # length of 125 bytes or less - if opcode > 0x7 and length > 125: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", - ) - - # Set compress status if last package is FIN - # OR set compress status if this is first fragment - # Raise error if not first fragment with rsv1 = 0x1 - if self._frame_fin or self._compressed is None: - self._compressed = True if rsv1 else False - elif rsv1: - raise WebSocketError( - WSCloseCode.PROTOCOL_ERROR, - "Received frame with non-zero reserved bits", - ) - - self._frame_fin = bool(fin) - self._frame_opcode = opcode - self._has_mask = bool(has_mask) - self._payload_length_flag = length - self._state = WSParserState.READ_PAYLOAD_LENGTH - else: - break - - # read payload length - if self._state == WSParserState.READ_PAYLOAD_LENGTH: - length = self._payload_length_flag - if length == 126: - if buf_length - start_pos >= 2: - data = buf[start_pos : start_pos + 2] - start_pos += 2 - length = UNPACK_LEN2(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: - break - elif length > 126: - if 
buf_length - start_pos >= 8: - data = buf[start_pos : start_pos + 8] - start_pos += 8 - length = UNPACK_LEN3(data)[0] - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - else: - break - else: - self._payload_length = length - self._state = ( - WSParserState.READ_PAYLOAD_MASK - if self._has_mask - else WSParserState.READ_PAYLOAD - ) - - # read payload mask - if self._state == WSParserState.READ_PAYLOAD_MASK: - if buf_length - start_pos >= 4: - self._frame_mask = buf[start_pos : start_pos + 4] - start_pos += 4 - self._state = WSParserState.READ_PAYLOAD - else: - break - - if self._state == WSParserState.READ_PAYLOAD: - length = self._payload_length - payload = self._frame_payload - - chunk_len = buf_length - start_pos - if length >= chunk_len: - self._payload_length = length - chunk_len - payload.extend(buf[start_pos:]) - start_pos = buf_length - else: - self._payload_length = 0 - payload.extend(buf[start_pos : start_pos + length]) - start_pos = start_pos + length - - if self._payload_length == 0: - if self._has_mask: - assert self._frame_mask is not None - _websocket_mask(self._frame_mask, payload) - - frames.append( - (self._frame_fin, self._frame_opcode, payload, self._compressed) - ) - - self._frame_payload = bytearray() - self._state = WSParserState.READ_HEADER - else: - break - - self._tail = buf[start_pos:] - - return frames - - -class WebSocketWriter: - def __init__( - self, - protocol: BaseProtocol, - transport: asyncio.Transport, - *, - use_mask: bool = False, - limit: int = DEFAULT_LIMIT, - random: random.Random = random.Random(), - compress: int = 0, - notakeover: bool = False, - ) -> None: - self.protocol = protocol - self.transport = transport - self.use_mask = use_mask - self.randrange = random.randrange - self.compress = compress - self.notakeover = notakeover - self._closing = False - self._limit = limit - self._output_size = 0 - self._compressobj: Any = None # actually compressobj - - async def _send_frame( - self, message: bytes, opcode: int, compress: Optional[int] = None - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if self._closing and not (opcode & WSMsgType.CLOSE): - raise ConnectionResetError("Cannot write to closing transport") - - rsv = 0 - - # Only compress larger packets (disabled) - # Does small packet needs to be compressed? - # if self.compress and opcode < 8 and len(message) > 124: - if (compress or self.compress) and opcode < 8: - if compress: - # Do not set self._compress if compressing is for this frame - compressobj = self._make_compress_obj(compress) - else: # self.compress - if not self._compressobj: - self._compressobj = self._make_compress_obj(self.compress) - compressobj = self._compressobj - - message = await compressobj.compress(message) - # Its critical that we do not return control to the event - # loop until we have finished sending all the compressed - # data. Otherwise we could end up mixing compressed frames - # if there are multiple coroutines compressing data. 
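# Illustrative sketch (not part of the deleted file): further down, _send_frame
# packs one of three header layouts depending on payload length (<126, <2**16,
# else an 8-byte length) and, for client frames, sets the mask bit and prepends a
# 4-byte masking key. A compact standalone version of that framing logic;
# `build_client_frame` is a hypothetical name, not aiohttp API.
import os
import struct

def build_client_frame(payload: bytes, opcode: int = 0x1) -> bytes:
    """Build a single masked, final (FIN=1) client frame."""
    first = 0x80 | opcode                # FIN bit set, no RSV bits
    length = len(payload)
    if length < 126:
        header = struct.pack("!BB", first, 0x80 | length)
    elif length < (1 << 16):
        header = struct.pack("!BBH", first, 0x80 | 126, length)
    else:
        header = struct.pack("!BBQ", first, 0x80 | 127, length)
    mask = os.urandom(4)                 # clients MUST mask (RFC 6455 section 5.3)
    masked = bytes(b ^ mask[i % 4] for i, b in enumerate(payload))
    return header + mask + masked

frame = build_client_frame(b"hi")
assert frame[0] == 0x81 and frame[1] == 0x82  # FIN+TEXT, mask bit set, length 2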
- message += compressobj.flush( - zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH - ) - if message.endswith(_WS_DEFLATE_TRAILING): - message = message[:-4] - rsv = rsv | 0x40 - - msg_length = len(message) - - use_mask = self.use_mask - if use_mask: - mask_bit = 0x80 - else: - mask_bit = 0 - - if msg_length < 126: - header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) - elif msg_length < (1 << 16): - header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) - else: - header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) - if use_mask: - mask_int = self.randrange(0, 0xFFFFFFFF) - mask = mask_int.to_bytes(4, "big") - message = bytearray(message) - _websocket_mask(mask, message) - self._write(header + mask + message) - self._output_size += len(header) + len(mask) + msg_length - else: - if msg_length > MSG_SIZE: - self._write(header) - self._write(message) - else: - self._write(header + message) - - self._output_size += len(header) + msg_length - - # It is safe to return control to the event loop when using compression - # after this point as we have already sent or buffered all the data. - - if self._output_size > self._limit: - self._output_size = 0 - await self.protocol._drain_helper() - - def _make_compress_obj(self, compress: int) -> ZLibCompressor: - return ZLibCompressor( - level=zlib.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) - - def _write(self, data: bytes) -> None: - if self.transport is None or self.transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") - self.transport.write(data) - - async def pong(self, message: Union[bytes, str] = b"") -> None: - """Send pong message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self._send_frame(message, WSMsgType.PONG) - - async def ping(self, message: Union[bytes, str] = b"") -> None: - """Send ping message.""" - if isinstance(message, str): - message = message.encode("utf-8") - await self._send_frame(message, WSMsgType.PING) - - async def send( - self, - message: Union[str, bytes], - binary: bool = False, - compress: Optional[int] = None, - ) -> None: - """Send a frame over the websocket with message as its payload.""" - if isinstance(message, str): - message = message.encode("utf-8") - if binary: - await self._send_frame(message, WSMsgType.BINARY, compress) - else: - await self._send_frame(message, WSMsgType.TEXT, compress) - - async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: - """Close the websocket, sending the specified code and message.""" - if isinstance(message, str): - message = message.encode("utf-8") - try: - await self._send_frame( - PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE - ) - finally: - self._closing = True diff --git a/.venv/Lib/site-packages/aiohttp/http_writer.py b/.venv/Lib/site-packages/aiohttp/http_writer.py deleted file mode 100644 index d6b02e6..0000000 --- a/.venv/Lib/site-packages/aiohttp/http_writer.py +++ /dev/null @@ -1,198 +0,0 @@ -"""Http related parsers and protocol.""" - -import asyncio -import zlib -from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa - -from multidict import CIMultiDict - -from .abc import AbstractStreamWriter -from .base_protocol import BaseProtocol -from .compression_utils import ZLibCompressor -from .helpers import NO_EXTENSIONS - -__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11") - - -class HttpVersion(NamedTuple): - major: int - 
minor: int - - -HttpVersion10 = HttpVersion(1, 0) -HttpVersion11 = HttpVersion(1, 1) - - -_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] -_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]] - - -class StreamWriter(AbstractStreamWriter): - def __init__( - self, - protocol: BaseProtocol, - loop: asyncio.AbstractEventLoop, - on_chunk_sent: _T_OnChunkSent = None, - on_headers_sent: _T_OnHeadersSent = None, - ) -> None: - self._protocol = protocol - - self.loop = loop - self.length = None - self.chunked = False - self.buffer_size = 0 - self.output_size = 0 - - self._eof = False - self._compress: Optional[ZLibCompressor] = None - self._drain_waiter = None - - self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent - self._on_headers_sent: _T_OnHeadersSent = on_headers_sent - - @property - def transport(self) -> Optional[asyncio.Transport]: - return self._protocol.transport - - @property - def protocol(self) -> BaseProtocol: - return self._protocol - - def enable_chunking(self) -> None: - self.chunked = True - - def enable_compression( - self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY - ) -> None: - self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) - - def _write(self, chunk: bytes) -> None: - size = len(chunk) - self.buffer_size += size - self.output_size += size - transport = self.transport - if not self._protocol.connected or transport is None or transport.is_closing(): - raise ConnectionResetError("Cannot write to closing transport") - transport.write(chunk) - - async def write( - self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000 - ) -> None: - """Writes chunk of data to a stream. - - write_eof() indicates end of stream. - writer can't be used after write_eof() method being called. - write() return drain future. 
- """ - if self._on_chunk_sent is not None: - await self._on_chunk_sent(chunk) - - if isinstance(chunk, memoryview): - if chunk.nbytes != len(chunk): - # just reshape it - chunk = chunk.cast("c") - - if self._compress is not None: - chunk = await self._compress.compress(chunk) - if not chunk: - return - - if self.length is not None: - chunk_len = len(chunk) - if self.length >= chunk_len: - self.length = self.length - chunk_len - else: - chunk = chunk[: self.length] - self.length = 0 - if not chunk: - return - - if chunk: - if self.chunked: - chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len_pre + chunk + b"\r\n" - - self._write(chunk) - - if self.buffer_size > LIMIT and drain: - self.buffer_size = 0 - await self.drain() - - async def write_headers( - self, status_line: str, headers: "CIMultiDict[str]" - ) -> None: - """Write request/response status and headers.""" - if self._on_headers_sent is not None: - await self._on_headers_sent(headers) - - # status + headers - buf = _serialize_headers(status_line, headers) - self._write(buf) - - async def write_eof(self, chunk: bytes = b"") -> None: - if self._eof: - return - - if chunk and self._on_chunk_sent is not None: - await self._on_chunk_sent(chunk) - - if self._compress: - if chunk: - chunk = await self._compress.compress(chunk) - - chunk += self._compress.flush() - if chunk and self.chunked: - chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" - else: - if self.chunked: - if chunk: - chunk_len = ("%x\r\n" % len(chunk)).encode("ascii") - chunk = chunk_len + chunk + b"\r\n0\r\n\r\n" - else: - chunk = b"0\r\n\r\n" - - if chunk: - self._write(chunk) - - await self.drain() - - self._eof = True - - async def drain(self) -> None: - """Flush the write buffer. - - The intended use is to write - - await w.write(data) - await w.drain() - """ - if self._protocol.transport is not None: - await self._protocol._drain_helper() - - -def _safe_header(string: str) -> str: - if "\r" in string or "\n" in string: - raise ValueError( - "Newline or carriage return detected in headers. " - "Potential header injection attack." - ) - return string - - -def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes: - headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items()) - line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n" - return line.encode("utf-8") - - -_serialize_headers = _py_serialize_headers - -try: - import aiohttp._http_writer as _http_writer # type: ignore[import-not-found] - - _c_serialize_headers = _http_writer._serialize_headers - if not NO_EXTENSIONS: - _serialize_headers = _c_serialize_headers -except ImportError: - pass diff --git a/.venv/Lib/site-packages/aiohttp/locks.py b/.venv/Lib/site-packages/aiohttp/locks.py deleted file mode 100644 index de2dc83..0000000 --- a/.venv/Lib/site-packages/aiohttp/locks.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import collections -from typing import Any, Deque, Optional - - -class EventResultOrError: - """Event asyncio lock helper class. - - Wraps the Event asyncio lock allowing either to awake the - locked Tasks without any error or raising an exception. - - thanks to @vorpalsmith for the simple design. 
- """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._exc: Optional[BaseException] = None - self._event = asyncio.Event() - self._waiters: Deque[asyncio.Future[Any]] = collections.deque() - - def set(self, exc: Optional[BaseException] = None) -> None: - self._exc = exc - self._event.set() - - async def wait(self) -> Any: - waiter = self._loop.create_task(self._event.wait()) - self._waiters.append(waiter) - try: - val = await waiter - finally: - self._waiters.remove(waiter) - - if self._exc is not None: - raise self._exc - - return val - - def cancel(self) -> None: - """Cancel all waiters""" - for waiter in self._waiters: - waiter.cancel() diff --git a/.venv/Lib/site-packages/aiohttp/log.py b/.venv/Lib/site-packages/aiohttp/log.py deleted file mode 100644 index 3cecea2..0000000 --- a/.venv/Lib/site-packages/aiohttp/log.py +++ /dev/null @@ -1,8 +0,0 @@ -import logging - -access_logger = logging.getLogger("aiohttp.access") -client_logger = logging.getLogger("aiohttp.client") -internal_logger = logging.getLogger("aiohttp.internal") -server_logger = logging.getLogger("aiohttp.server") -web_logger = logging.getLogger("aiohttp.web") -ws_logger = logging.getLogger("aiohttp.websocket") diff --git a/.venv/Lib/site-packages/aiohttp/multipart.py b/.venv/Lib/site-packages/aiohttp/multipart.py deleted file mode 100644 index 602a6b6..0000000 --- a/.venv/Lib/site-packages/aiohttp/multipart.py +++ /dev/null @@ -1,969 +0,0 @@ -import base64 -import binascii -import json -import re -import uuid -import warnings -import zlib -from collections import deque -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Deque, - Dict, - Iterator, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) -from urllib.parse import parse_qsl, unquote, urlencode - -from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping - -from .compression_utils import ZLibCompressor, ZLibDecompressor -from .hdrs import ( - CONTENT_DISPOSITION, - CONTENT_ENCODING, - CONTENT_LENGTH, - CONTENT_TRANSFER_ENCODING, - CONTENT_TYPE, -) -from .helpers import CHAR, TOKEN, parse_mimetype, reify -from .http import HeadersParser -from .payload import ( - JsonPayload, - LookupError, - Order, - Payload, - StringPayload, - get_payload, - payload_type, -) -from .streams import StreamReader - -__all__ = ( - "MultipartReader", - "MultipartWriter", - "BodyPartReader", - "BadContentDispositionHeader", - "BadContentDispositionParam", - "parse_content_disposition", - "content_disposition_filename", -) - - -if TYPE_CHECKING: - from .client_reqrep import ClientResponse - - -class BadContentDispositionHeader(RuntimeWarning): - pass - - -class BadContentDispositionParam(RuntimeWarning): - pass - - -def parse_content_disposition( - header: Optional[str], -) -> Tuple[Optional[str], Dict[str, str]]: - def is_token(string: str) -> bool: - return bool(string) and TOKEN >= set(string) - - def is_quoted(string: str) -> bool: - return string[0] == string[-1] == '"' - - def is_rfc5987(string: str) -> bool: - return is_token(string) and string.count("'") == 2 - - def is_extended_param(string: str) -> bool: - return string.endswith("*") - - def is_continuous_param(string: str) -> bool: - pos = string.find("*") + 1 - if not pos: - return False - substring = string[pos:-1] if string.endswith("*") else string[pos:] - return substring.isdigit() - - def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: - return re.sub(f"\\\\([{chars}])", 
"\\1", text) - - if not header: - return None, {} - - disptype, *parts = header.split(";") - if not is_token(disptype): - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - params: Dict[str, str] = {} - while parts: - item = parts.pop(0) - - if "=" not in item: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - key, value = item.split("=", 1) - key = key.lower().strip() - value = value.lstrip() - - if key in params: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - if not is_token(key): - warnings.warn(BadContentDispositionParam(item)) - continue - - elif is_continuous_param(key): - if is_quoted(value): - value = unescape(value[1:-1]) - elif not is_token(value): - warnings.warn(BadContentDispositionParam(item)) - continue - - elif is_extended_param(key): - if is_rfc5987(value): - encoding, _, value = value.split("'", 2) - encoding = encoding or "utf-8" - else: - warnings.warn(BadContentDispositionParam(item)) - continue - - try: - value = unquote(value, encoding, "strict") - except UnicodeDecodeError: # pragma: nocover - warnings.warn(BadContentDispositionParam(item)) - continue - - else: - failed = True - if is_quoted(value): - failed = False - value = unescape(value[1:-1].lstrip("\\/")) - elif is_token(value): - failed = False - elif parts: - # maybe just ; in filename, in any case this is just - # one case fix, for proper fix we need to redesign parser - _value = f"{value};{parts[0]}" - if is_quoted(_value): - parts.pop(0) - value = unescape(_value[1:-1].lstrip("\\/")) - failed = False - - if failed: - warnings.warn(BadContentDispositionHeader(header)) - return None, {} - - params[key] = value - - return disptype.lower(), params - - -def content_disposition_filename( - params: Mapping[str, str], name: str = "filename" -) -> Optional[str]: - name_suf = "%s*" % name - if not params: - return None - elif name_suf in params: - return params[name_suf] - elif name in params: - return params[name] - else: - parts = [] - fnparams = sorted( - (key, value) for key, value in params.items() if key.startswith(name_suf) - ) - for num, (key, value) in enumerate(fnparams): - _, tail = key.split("*", 1) - if tail.endswith("*"): - tail = tail[:-1] - if tail == str(num): - parts.append(value) - else: - break - if not parts: - return None - value = "".join(parts) - if "'" in value: - encoding, _, value = value.split("'", 2) - encoding = encoding or "utf-8" - return unquote(value, encoding, "strict") - return value - - -class MultipartResponseWrapper: - """Wrapper around the MultipartReader. - - It takes care about - underlying connection and close it when it needs in. - """ - - def __init__( - self, - resp: "ClientResponse", - stream: "MultipartReader", - ) -> None: - self.resp = resp - self.stream = stream - - def __aiter__(self) -> "MultipartResponseWrapper": - return self - - async def __anext__( - self, - ) -> Union["MultipartReader", "BodyPartReader"]: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - def at_eof(self) -> bool: - """Returns True when all response data had been read.""" - return self.resp.content.at_eof() - - async def next( - self, - ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: - """Emits next multipart reader object.""" - item = await self.stream.next() - if self.stream.at_eof(): - await self.release() - return item - - async def release(self) -> None: - """Release the connection gracefully. - - All remaining content is read to the void. 
- """ - await self.resp.release() - - -class BodyPartReader: - """Multipart reader for single body part.""" - - chunk_size = 8192 - - def __init__( - self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader - ) -> None: - self.headers = headers - self._boundary = boundary - self._content = content - self._at_eof = False - length = self.headers.get(CONTENT_LENGTH, None) - self._length = int(length) if length is not None else None - self._read_bytes = 0 - self._unread: Deque[bytes] = deque() - self._prev_chunk: Optional[bytes] = None - self._content_eof = 0 - self._cache: Dict[str, Any] = {} - - def __aiter__(self) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] - - async def __anext__(self) -> bytes: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - async def next(self) -> Optional[bytes]: - item = await self.read() - if not item: - return None - return item - - async def read(self, *, decode: bool = False) -> bytes: - """Reads body part data. - - decode: Decodes data following by encoding - method from Content-Encoding header. If it missed - data remains untouched - """ - if self._at_eof: - return b"" - data = bytearray() - while not self._at_eof: - data.extend(await self.read_chunk(self.chunk_size)) - if decode: - return self.decode(data) - return data - - async def read_chunk(self, size: int = chunk_size) -> bytes: - """Reads body part content chunk of the specified size. - - size: chunk size - """ - if self._at_eof: - return b"" - if self._length: - chunk = await self._read_chunk_from_length(size) - else: - chunk = await self._read_chunk_from_stream(size) - - self._read_bytes += len(chunk) - if self._read_bytes == self._length: - self._at_eof = True - if self._at_eof: - clrf = await self._content.readline() - assert ( - b"\r\n" == clrf - ), "reader did not read all the data or it is malformed" - return chunk - - async def _read_chunk_from_length(self, size: int) -> bytes: - # Reads body part content chunk of the specified size. - # The body part must has Content-Length header with proper value. - assert self._length is not None, "Content-Length required for chunked read" - chunk_size = min(size, self._length - self._read_bytes) - chunk = await self._content.read(chunk_size) - return chunk - - async def _read_chunk_from_stream(self, size: int) -> bytes: - # Reads content chunk of body part with unknown length. - # The Content-Length header for body part is not necessary. 
- assert ( - size >= len(self._boundary) + 2 - ), "Chunk size must be greater or equal than boundary length + 2" - first_chunk = self._prev_chunk is None - if first_chunk: - self._prev_chunk = await self._content.read(size) - - chunk = await self._content.read(size) - self._content_eof += int(self._content.at_eof()) - assert self._content_eof < 3, "Reading after EOF" - assert self._prev_chunk is not None - window = self._prev_chunk + chunk - sub = b"\r\n" + self._boundary - if first_chunk: - idx = window.find(sub) - else: - idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub))) - if idx >= 0: - # pushing boundary back to content - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - self._content.unread_data(window[idx:]) - if size > idx: - self._prev_chunk = self._prev_chunk[:idx] - chunk = window[len(self._prev_chunk) : idx] - if not chunk: - self._at_eof = True - result = self._prev_chunk - self._prev_chunk = chunk - return result - - async def readline(self) -> bytes: - """Reads body part by line by line.""" - if self._at_eof: - return b"" - - if self._unread: - line = self._unread.popleft() - else: - line = await self._content.readline() - - if line.startswith(self._boundary): - # the very last boundary may not come with \r\n, - # so set single rules for everyone - sline = line.rstrip(b"\r\n") - boundary = self._boundary - last_boundary = self._boundary + b"--" - # ensure that we read exactly the boundary, not something alike - if sline == boundary or sline == last_boundary: - self._at_eof = True - self._unread.append(line) - return b"" - else: - next_line = await self._content.readline() - if next_line.startswith(self._boundary): - line = line[:-2] # strip CRLF but only once - self._unread.append(next_line) - - return line - - async def release(self) -> None: - """Like read(), but reads all the data to the void.""" - if self._at_eof: - return - while not self._at_eof: - await self.read_chunk(self.chunk_size) - - async def text(self, *, encoding: Optional[str] = None) -> str: - """Like read(), but assumes that body part contains text data.""" - data = await self.read(decode=True) - # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm - # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send - encoding = encoding or self.get_charset(default="utf-8") - return data.decode(encoding) - - async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: - """Like read(), but assumes that body parts contains JSON data.""" - data = await self.read(decode=True) - if not data: - return None - encoding = encoding or self.get_charset(default="utf-8") - return cast(Dict[str, Any], json.loads(data.decode(encoding))) - - async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: - """Like read(), but assumes that body parts contain form urlencoded data.""" - data = await self.read(decode=True) - if not data: - return [] - if encoding is not None: - real_encoding = encoding - else: - real_encoding = self.get_charset(default="utf-8") - try: - decoded_data = data.rstrip().decode(real_encoding) - except UnicodeDecodeError: - raise ValueError("data cannot be decoded with %s encoding" % real_encoding) - - return parse_qsl( - decoded_data, - keep_blank_values=True, - encoding=real_encoding, - ) - - def at_eof(self) -> bool: - """Returns True if the boundary was reached or False otherwise.""" - return self._at_eof - - def decode(self, data: bytes) -> 
bytes: - """Decodes data. - - Decoding is done according the specified Content-Encoding - or Content-Transfer-Encoding headers value. - """ - if CONTENT_TRANSFER_ENCODING in self.headers: - data = self._decode_content_transfer(data) - if CONTENT_ENCODING in self.headers: - return self._decode_content(data) - return data - - def _decode_content(self, data: bytes) -> bytes: - encoding = self.headers.get(CONTENT_ENCODING, "").lower() - if encoding == "identity": - return data - if encoding in {"deflate", "gzip"}: - return ZLibDecompressor( - encoding=encoding, - suppress_deflate_header=True, - ).decompress_sync(data) - - raise RuntimeError(f"unknown content encoding: {encoding}") - - def _decode_content_transfer(self, data: bytes) -> bytes: - encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() - - if encoding == "base64": - return base64.b64decode(data) - elif encoding == "quoted-printable": - return binascii.a2b_qp(data) - elif encoding in ("binary", "8bit", "7bit"): - return data - else: - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(encoding) - ) - - def get_charset(self, default: str) -> str: - """Returns charset parameter from Content-Type header or default.""" - ctype = self.headers.get(CONTENT_TYPE, "") - mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", default) - - @reify - def name(self) -> Optional[str]: - """Returns name specified in Content-Disposition header. - - If the header is missing or malformed, returns None. - """ - _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) - return content_disposition_filename(params, "name") - - @reify - def filename(self) -> Optional[str]: - """Returns filename specified in Content-Disposition header. - - Returns None if the header is missing or malformed. - """ - _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION)) - return content_disposition_filename(params, "filename") - - -@payload_type(BodyPartReader, order=Order.try_first) -class BodyPartReaderPayload(Payload): - def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: - super().__init__(value, *args, **kwargs) - - params: Dict[str, str] = {} - if value.name is not None: - params["name"] = value.name - if value.filename is not None: - params["filename"] = value.filename - - if params: - self.set_content_disposition("attachment", True, **params) - - async def write(self, writer: Any) -> None: - field = self._value - chunk = await field.read_chunk(size=2**16) - while chunk: - await writer.write(field.decode(chunk)) - chunk = await field.read_chunk(size=2**16) - - -class MultipartReader: - """Multipart body reader.""" - - #: Response wrapper, used when multipart readers constructs from response. - response_wrapper_cls = MultipartResponseWrapper - #: Multipart reader class, used to handle multipart/* body parts. - #: None points to type(self) - multipart_reader_cls = None - #: Body part reader class for non multipart/* content types. 
- part_reader_cls = BodyPartReader - - def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: - self.headers = headers - self._boundary = ("--" + self._get_boundary()).encode() - self._content = content - self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None - self._at_eof = False - self._at_bof = True - self._unread: List[bytes] = [] - - def __aiter__( - self, - ) -> AsyncIterator["BodyPartReader"]: - return self # type: ignore[return-value] - - async def __anext__( - self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: - part = await self.next() - if part is None: - raise StopAsyncIteration - return part - - @classmethod - def from_response( - cls, - response: "ClientResponse", - ) -> MultipartResponseWrapper: - """Constructs reader instance from HTTP response. - - :param response: :class:`~aiohttp.client.ClientResponse` instance - """ - obj = cls.response_wrapper_cls( - response, cls(response.headers, response.content) - ) - return obj - - def at_eof(self) -> bool: - """Returns True if the final boundary was reached, false otherwise.""" - return self._at_eof - - async def next( - self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: - """Emits the next multipart body part.""" - # So, if we're at BOF, we need to skip till the boundary. - if self._at_eof: - return None - await self._maybe_release_last_part() - if self._at_bof: - await self._read_until_first_boundary() - self._at_bof = False - else: - await self._read_boundary() - if self._at_eof: # we just read the last boundary, nothing to do there - return None - self._last_part = await self.fetch_next_part() - return self._last_part - - async def release(self) -> None: - """Reads all the body parts to the void till the final boundary.""" - while not self._at_eof: - item = await self.next() - if item is None: - break - await item.release() - - async def fetch_next_part( - self, - ) -> Union["MultipartReader", BodyPartReader]: - """Returns the next body part reader.""" - headers = await self._read_headers() - return self._get_part_reader(headers) - - def _get_part_reader( - self, - headers: "CIMultiDictProxy[str]", - ) -> Union["MultipartReader", BodyPartReader]: - """Dispatches the response by the `Content-Type` header. - - Returns a suitable reader instance. 
- - :param dict headers: Response headers - """ - ctype = headers.get(CONTENT_TYPE, "") - mimetype = parse_mimetype(ctype) - - if mimetype.type == "multipart": - if self.multipart_reader_cls is None: - return type(self)(headers, self._content) - return self.multipart_reader_cls(headers, self._content) - else: - return self.part_reader_cls(self._boundary, headers, self._content) - - def _get_boundary(self) -> str: - mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) - - assert mimetype.type == "multipart", "multipart/* content type expected" - - if "boundary" not in mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] - ) - - boundary = mimetype.parameters["boundary"] - if len(boundary) > 70: - raise ValueError("boundary %r is too long (70 chars max)" % boundary) - - return boundary - - async def _readline(self) -> bytes: - if self._unread: - return self._unread.pop() - return await self._content.readline() - - async def _read_until_first_boundary(self) -> None: - while True: - chunk = await self._readline() - if chunk == b"": - raise ValueError( - "Could not find starting boundary %r" % (self._boundary) - ) - chunk = chunk.rstrip() - if chunk == self._boundary: - return - elif chunk == self._boundary + b"--": - self._at_eof = True - return - - async def _read_boundary(self) -> None: - chunk = (await self._readline()).rstrip() - if chunk == self._boundary: - pass - elif chunk == self._boundary + b"--": - self._at_eof = True - epilogue = await self._readline() - next_line = await self._readline() - - # the epilogue is expected and then either the end of input or the - # parent multipart boundary, if the parent boundary is found then - # it should be marked as unread and handed to the parent for - # processing - if next_line[:2] == b"--": - self._unread.append(next_line) - # otherwise the request is likely missing an epilogue and both - # lines should be passed to the parent for processing - # (this handles the old behavior gracefully) - else: - self._unread.extend([next_line, epilogue]) - else: - raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") - - async def _read_headers(self) -> "CIMultiDictProxy[str]": - lines = [b""] - while True: - chunk = await self._content.readline() - chunk = chunk.strip() - lines.append(chunk) - if not chunk: - break - parser = HeadersParser() - headers, raw_headers = parser.parse_headers(lines) - return headers - - async def _maybe_release_last_part(self) -> None: - """Ensures that the last read body part is read completely.""" - if self._last_part is not None: - if not self._last_part.at_eof(): - await self._last_part.release() - self._unread.extend(self._last_part._unread) - self._last_part = None - - -_Part = Tuple[Payload, str, str] - - -class MultipartWriter(Payload): - """Multipart body writer.""" - - def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: - boundary = boundary if boundary is not None else uuid.uuid4().hex - # The underlying Payload API demands a str (utf-8), not bytes, - # so we need to ensure we don't lose anything during conversion. - # As a result, require the boundary to be ASCII only. - # In both situations. 
- - try: - self._boundary = boundary.encode("ascii") - except UnicodeEncodeError: - raise ValueError("boundary should contain ASCII only chars") from None - ctype = f"multipart/{subtype}; boundary={self._boundary_value}" - - super().__init__(None, content_type=ctype) - - self._parts: List[_Part] = [] - - def __enter__(self) -> "MultipartWriter": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - pass - - def __iter__(self) -> Iterator[_Part]: - return iter(self._parts) - - def __len__(self) -> int: - return len(self._parts) - - def __bool__(self) -> bool: - return True - - _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") - _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") - - @property - def _boundary_value(self) -> str: - """Wrap boundary parameter value in quotes, if necessary. - - Reads self.boundary and returns a unicode string. - """ - # Refer to RFCs 7231, 7230, 5234. - # - # parameter = token "=" ( token / quoted-string ) - # token = 1*tchar - # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE - # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text - # obs-text = %x80-FF - # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) - # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" - # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" - # / DIGIT / ALPHA - # ; any VCHAR, except delimiters - # VCHAR = %x21-7E - value = self._boundary - if re.match(self._valid_tchar_regex, value): - return value.decode("ascii") # cannot fail - - if re.search(self._invalid_qdtext_char_regex, value): - raise ValueError("boundary value contains invalid characters") - - # escape %x5C and %x22 - quoted_value_content = value.replace(b"\\", b"\\\\") - quoted_value_content = quoted_value_content.replace(b'"', b'\\"') - - return '"' + quoted_value_content.decode("ascii") + '"' - - @property - def boundary(self) -> str: - return self._boundary.decode("ascii") - - def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: - if headers is None: - headers = CIMultiDict() - - if isinstance(obj, Payload): - obj.headers.update(headers) - return self.append_payload(obj) - else: - try: - payload = get_payload(obj, headers=headers) - except LookupError: - raise TypeError("Cannot create payload from %r" % obj) - else: - return self.append_payload(payload) - - def append_payload(self, payload: Payload) -> Payload: - """Adds a new body part to multipart writer.""" - # compression - encoding: Optional[str] = payload.headers.get( - CONTENT_ENCODING, - "", - ).lower() - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding: Optional[str] = payload.headers.get( - CONTENT_TRANSFER_ENCODING, - "", - ).lower() - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(te_encoding) - ) - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) - - self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] - return payload - - def append_json( - self, obj: Any, headers: Optional[MultiMapping[str]] = None - ) -> Payload: - """Helper to append JSON part.""" - if headers is None: 
- headers = CIMultiDict() - - return self.append_payload(JsonPayload(obj, headers=headers)) - - def append_form( - self, - obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[MultiMapping[str]] = None, - ) -> Payload: - """Helper to append form urlencoded part.""" - assert isinstance(obj, (Sequence, Mapping)) - - if headers is None: - headers = CIMultiDict() - - if isinstance(obj, Mapping): - obj = list(obj.items()) - data = urlencode(obj, doseq=True) - - return self.append_payload( - StringPayload( - data, headers=headers, content_type="application/x-www-form-urlencoded" - ) - ) - - @property - def size(self) -> Optional[int]: - """Size of the payload.""" - total = 0 - for part, encoding, te_encoding in self._parts: - if encoding or te_encoding or part.size is None: - return None - - total += int( - 2 - + len(self._boundary) - + 2 - + part.size # b'--'+self._boundary+b'\r\n' - + len(part._binary_headers) - + 2 # b'\r\n' - ) - - total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' - return total - - async def write(self, writer: Any, close_boundary: bool = True) -> None: - """Write body.""" - for part, encoding, te_encoding in self._parts: - await writer.write(b"--" + self._boundary + b"\r\n") - await writer.write(part._binary_headers) - - if encoding or te_encoding: - w = MultipartPayloadWriter(writer) - if encoding: - w.enable_compression(encoding) - if te_encoding: - w.enable_encoding(te_encoding) - await part.write(w) # type: ignore[arg-type] - await w.write_eof() - else: - await part.write(writer) - - await writer.write(b"\r\n") - - if close_boundary: - await writer.write(b"--" + self._boundary + b"--\r\n") - - -class MultipartPayloadWriter: - def __init__(self, writer: Any) -> None: - self._writer = writer - self._encoding: Optional[str] = None - self._compress: Optional[ZLibCompressor] = None - self._encoding_buffer: Optional[bytearray] = None - - def enable_encoding(self, encoding: str) -> None: - if encoding == "base64": - self._encoding = encoding - self._encoding_buffer = bytearray() - elif encoding == "quoted-printable": - self._encoding = "quoted-printable" - - def enable_compression( - self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY - ) -> None: - self._compress = ZLibCompressor( - encoding=encoding, - suppress_deflate_header=True, - strategy=strategy, - ) - - async def write_eof(self) -> None: - if self._compress is not None: - chunk = self._compress.flush() - if chunk: - self._compress = None - await self.write(chunk) - - if self._encoding == "base64": - if self._encoding_buffer: - await self._writer.write(base64.b64encode(self._encoding_buffer)) - - async def write(self, chunk: bytes) -> None: - if self._compress is not None: - if chunk: - chunk = await self._compress.compress(chunk) - if not chunk: - return - - if self._encoding == "base64": - buf = self._encoding_buffer - assert buf is not None - buf.extend(chunk) - - if buf: - div, mod = divmod(len(buf), 3) - enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) - if enc_chunk: - b64chunk = base64.b64encode(enc_chunk) - await self._writer.write(b64chunk) - elif self._encoding == "quoted-printable": - await self._writer.write(binascii.b2a_qp(chunk)) - else: - await self._writer.write(chunk) diff --git a/.venv/Lib/site-packages/aiohttp/payload.py b/.venv/Lib/site-packages/aiohttp/payload.py deleted file mode 100644 index 6593b05..0000000 --- a/.venv/Lib/site-packages/aiohttp/payload.py +++ /dev/null @@ -1,463 +0,0 @@ -import asyncio -import 
enum -import io -import json -import mimetypes -import os -import warnings -from abc import ABC, abstractmethod -from itertools import chain -from typing import ( - IO, - TYPE_CHECKING, - Any, - ByteString, - Dict, - Final, - Iterable, - Optional, - TextIO, - Tuple, - Type, - Union, -) - -from multidict import CIMultiDict - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import ( - _SENTINEL, - content_disposition_header, - guess_filename, - parse_mimetype, - sentinel, -) -from .streams import StreamReader -from .typedefs import JSONEncoder, _CIMultiDict - -__all__ = ( - "PAYLOAD_REGISTRY", - "get_payload", - "payload_type", - "Payload", - "BytesPayload", - "StringPayload", - "IOBasePayload", - "BytesIOPayload", - "BufferedReaderPayload", - "TextIOPayload", - "StringIOPayload", - "JsonPayload", - "AsyncIterablePayload", -) - -TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB - -if TYPE_CHECKING: - from typing import List - - -class LookupError(Exception): - pass - - -class Order(str, enum.Enum): - normal = "normal" - try_first = "try_first" - try_last = "try_last" - - -def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": - return PAYLOAD_REGISTRY.get(data, *args, **kwargs) - - -def register_payload( - factory: Type["Payload"], type: Any, *, order: Order = Order.normal -) -> None: - PAYLOAD_REGISTRY.register(factory, type, order=order) - - -class payload_type: - def __init__(self, type: Any, *, order: Order = Order.normal) -> None: - self.type = type - self.order = order - - def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: - register_payload(factory, self.type, order=self.order) - return factory - - -PayloadType = Type["Payload"] -_PayloadRegistryItem = Tuple[PayloadType, Any] - - -class PayloadRegistry: - """Payload registry. 
- - note: we need zope.interface for more efficient adapter search - """ - - def __init__(self) -> None: - self._first: List[_PayloadRegistryItem] = [] - self._normal: List[_PayloadRegistryItem] = [] - self._last: List[_PayloadRegistryItem] = [] - - def get( - self, - data: Any, - *args: Any, - _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, - **kwargs: Any, - ) -> "Payload": - if isinstance(data, Payload): - return data - for factory, type in _CHAIN(self._first, self._normal, self._last): - if isinstance(data, type): - return factory(data, *args, **kwargs) - - raise LookupError() - - def register( - self, factory: PayloadType, type: Any, *, order: Order = Order.normal - ) -> None: - if order is Order.try_first: - self._first.append((factory, type)) - elif order is Order.normal: - self._normal.append((factory, type)) - elif order is Order.try_last: - self._last.append((factory, type)) - else: - raise ValueError(f"Unsupported order {order!r}") - - -class Payload(ABC): - - _default_content_type: str = "application/octet-stream" - _size: Optional[int] = None - - def __init__( - self, - value: Any, - headers: Optional[ - Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] - ] = None, - content_type: Union[str, None, _SENTINEL] = sentinel, - filename: Optional[str] = None, - encoding: Optional[str] = None, - **kwargs: Any, - ) -> None: - self._encoding = encoding - self._filename = filename - self._headers: _CIMultiDict = CIMultiDict() - self._value = value - if content_type is not sentinel and content_type is not None: - self._headers[hdrs.CONTENT_TYPE] = content_type - elif self._filename is not None: - content_type = mimetypes.guess_type(self._filename)[0] - if content_type is None: - content_type = self._default_content_type - self._headers[hdrs.CONTENT_TYPE] = content_type - else: - self._headers[hdrs.CONTENT_TYPE] = self._default_content_type - self._headers.update(headers or {}) - - @property - def size(self) -> Optional[int]: - """Size of the payload.""" - return self._size - - @property - def filename(self) -> Optional[str]: - """Filename of the payload.""" - return self._filename - - @property - def headers(self) -> _CIMultiDict: - """Custom item headers""" - return self._headers - - @property - def _binary_headers(self) -> bytes: - return ( - "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( - "utf-8" - ) - + b"\r\n" - ) - - @property - def encoding(self) -> Optional[str]: - """Payload encoding""" - return self._encoding - - @property - def content_type(self) -> str: - """Content type""" - return self._headers[hdrs.CONTENT_TYPE] - - def set_content_disposition( - self, - disptype: str, - quote_fields: bool = True, - _charset: str = "utf-8", - **params: Any, - ) -> None: - """Sets ``Content-Disposition`` header.""" - self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( - disptype, quote_fields=quote_fields, _charset=_charset, **params - ) - - @abstractmethod - async def write(self, writer: AbstractStreamWriter) -> None: - """Write payload. 
- - writer is an AbstractStreamWriter instance: - """ - - -class BytesPayload(Payload): - def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None: - if not isinstance(value, (bytes, bytearray, memoryview)): - raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") - - if "content_type" not in kwargs: - kwargs["content_type"] = "application/octet-stream" - - super().__init__(value, *args, **kwargs) - - if isinstance(value, memoryview): - self._size = value.nbytes - else: - self._size = len(value) - - if self._size > TOO_LARGE_BYTES_BODY: - kwargs = {"source": self} - warnings.warn( - "Sending a large body directly with raw bytes might" - " lock the event loop. You should probably pass an " - "io.BytesIO object instead", - ResourceWarning, - **kwargs, - ) - - async def write(self, writer: AbstractStreamWriter) -> None: - await writer.write(self._value) - - -class StringPayload(BytesPayload): - def __init__( - self, - value: str, - *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any, - ) -> None: - - if encoding is None: - if content_type is None: - real_encoding = "utf-8" - content_type = "text/plain; charset=utf-8" - else: - mimetype = parse_mimetype(content_type) - real_encoding = mimetype.parameters.get("charset", "utf-8") - else: - if content_type is None: - content_type = "text/plain; charset=%s" % encoding - real_encoding = encoding - - super().__init__( - value.encode(real_encoding), - encoding=real_encoding, - content_type=content_type, - *args, - **kwargs, - ) - - -class StringIOPayload(StringPayload): - def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: - super().__init__(value.read(), *args, **kwargs) - - -class IOBasePayload(Payload): - _value: IO[Any] - - def __init__( - self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any - ) -> None: - if "filename" not in kwargs: - kwargs["filename"] = guess_filename(value) - - super().__init__(value, *args, **kwargs) - - if self._filename is not None and disposition is not None: - if hdrs.CONTENT_DISPOSITION not in self.headers: - self.set_content_disposition(disposition, filename=self._filename) - - async def write(self, writer: AbstractStreamWriter) -> None: - loop = asyncio.get_event_loop() - try: - chunk = await loop.run_in_executor(None, self._value.read, 2**16) - while chunk: - await writer.write(chunk) - chunk = await loop.run_in_executor(None, self._value.read, 2**16) - finally: - await loop.run_in_executor(None, self._value.close) - - -class TextIOPayload(IOBasePayload): - _value: TextIO - - def __init__( - self, - value: TextIO, - *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, - **kwargs: Any, - ) -> None: - - if encoding is None: - if content_type is None: - encoding = "utf-8" - content_type = "text/plain; charset=utf-8" - else: - mimetype = parse_mimetype(content_type) - encoding = mimetype.parameters.get("charset", "utf-8") - else: - if content_type is None: - content_type = "text/plain; charset=%s" % encoding - - super().__init__( - value, - content_type=content_type, - encoding=encoding, - *args, - **kwargs, - ) - - @property - def size(self) -> Optional[int]: - try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: - return None - - async def write(self, writer: AbstractStreamWriter) -> None: - loop = asyncio.get_event_loop() - try: - chunk = await loop.run_in_executor(None, self._value.read, 2**16) - while chunk: - data = ( 
- chunk.encode(encoding=self._encoding) - if self._encoding - else chunk.encode() - ) - await writer.write(data) - chunk = await loop.run_in_executor(None, self._value.read, 2**16) - finally: - await loop.run_in_executor(None, self._value.close) - - -class BytesIOPayload(IOBasePayload): - @property - def size(self) -> int: - position = self._value.tell() - end = self._value.seek(0, os.SEEK_END) - self._value.seek(position) - return end - position - - -class BufferedReaderPayload(IOBasePayload): - @property - def size(self) -> Optional[int]: - try: - return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: - # data.fileno() is not supported, e.g. - # io.BufferedReader(io.BytesIO(b'data')) - return None - - -class JsonPayload(BytesPayload): - def __init__( - self, - value: Any, - encoding: str = "utf-8", - content_type: str = "application/json", - dumps: JSONEncoder = json.dumps, - *args: Any, - **kwargs: Any, - ) -> None: - - super().__init__( - dumps(value).encode(encoding), - content_type=content_type, - encoding=encoding, - *args, - **kwargs, - ) - - -if TYPE_CHECKING: - from typing import AsyncIterable, AsyncIterator - - _AsyncIterator = AsyncIterator[bytes] - _AsyncIterable = AsyncIterable[bytes] -else: - from collections.abc import AsyncIterable, AsyncIterator - - _AsyncIterator = AsyncIterator - _AsyncIterable = AsyncIterable - - -class AsyncIterablePayload(Payload): - - _iter: Optional[_AsyncIterator] = None - - def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: - if not isinstance(value, AsyncIterable): - raise TypeError( - "value argument must support " - "collections.abc.AsyncIterable interface, " - "got {!r}".format(type(value)) - ) - - if "content_type" not in kwargs: - kwargs["content_type"] = "application/octet-stream" - - super().__init__(value, *args, **kwargs) - - self._iter = value.__aiter__() - - async def write(self, writer: AbstractStreamWriter) -> None: - if self._iter: - try: - # iter is not None check prevents rare cases - # when the case iterable is used twice - while True: - chunk = await self._iter.__anext__() - await writer.write(chunk) - except StopAsyncIteration: - self._iter = None - - -class StreamReaderPayload(AsyncIterablePayload): - def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: - super().__init__(value.iter_any(), *args, **kwargs) - - -PAYLOAD_REGISTRY = PayloadRegistry() -PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) -PAYLOAD_REGISTRY.register(StringPayload, str) -PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) -PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) -PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) -PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) -PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) -PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) -# try_last for giving a chance to more specialized async interables like -# multidict.BodyPartReaderPayload override the default -PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/.venv/Lib/site-packages/aiohttp/payload_streamer.py b/.venv/Lib/site-packages/aiohttp/payload_streamer.py deleted file mode 100644 index 364f763..0000000 --- a/.venv/Lib/site-packages/aiohttp/payload_streamer.py +++ /dev/null @@ -1,75 +0,0 @@ -""" -Payload implementation for coroutines as data provider. 
- -As a simple case, you can upload data from file:: - - @aiohttp.streamer - async def file_sender(writer, file_name=None): - with open(file_name, 'rb') as f: - chunk = f.read(2**16) - while chunk: - await writer.write(chunk) - - chunk = f.read(2**16) - -Then you can use `file_sender` like this: - - async with session.post('http://httpbin.org/post', - data=file_sender(file_name='huge_file')) as resp: - print(await resp.text()) - -..note:: Coroutine must accept `writer` as first argument - -""" - -import types -import warnings -from typing import Any, Awaitable, Callable, Dict, Tuple - -from .abc import AbstractStreamWriter -from .payload import Payload, payload_type - -__all__ = ("streamer",) - - -class _stream_wrapper: - def __init__( - self, - coro: Callable[..., Awaitable[None]], - args: Tuple[Any, ...], - kwargs: Dict[str, Any], - ) -> None: - self.coro = types.coroutine(coro) - self.args = args - self.kwargs = kwargs - - async def __call__(self, writer: AbstractStreamWriter) -> None: - await self.coro(writer, *self.args, **self.kwargs) - - -class streamer: - def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: - warnings.warn( - "@streamer is deprecated, use async generators instead", - DeprecationWarning, - stacklevel=2, - ) - self.coro = coro - - def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: - return _stream_wrapper(self.coro, args, kwargs) - - -@payload_type(_stream_wrapper) -class StreamWrapperPayload(Payload): - async def write(self, writer: AbstractStreamWriter) -> None: - await self._value(writer) - - -@payload_type(streamer) -class StreamPayload(StreamWrapperPayload): - def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: - super().__init__(value(), *args, **kwargs) - - async def write(self, writer: AbstractStreamWriter) -> None: - await self._value(writer) diff --git a/.venv/Lib/site-packages/aiohttp/py.typed b/.venv/Lib/site-packages/aiohttp/py.typed deleted file mode 100644 index f5642f7..0000000 --- a/.venv/Lib/site-packages/aiohttp/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/.venv/Lib/site-packages/aiohttp/pytest_plugin.py b/.venv/Lib/site-packages/aiohttp/pytest_plugin.py deleted file mode 100644 index 5754747..0000000 --- a/.venv/Lib/site-packages/aiohttp/pytest_plugin.py +++ /dev/null @@ -1,381 +0,0 @@ -import asyncio -import contextlib -import warnings -from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union - -import pytest - -from aiohttp.helpers import isasyncgenfunction -from aiohttp.web import Application - -from .test_utils import ( - BaseTestServer, - RawTestServer, - TestClient, - TestServer, - loop_context, - setup_test_loop, - teardown_test_loop, - unused_port as _unused_port, -) - -try: - import uvloop -except ImportError: # pragma: no cover - uvloop = None # type: ignore[assignment] - -AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]] -AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]] -AiohttpServer = Callable[[Application], Awaitable[TestServer]] - - -def pytest_addoption(parser): # type: ignore[no-untyped-def] - parser.addoption( - "--aiohttp-fast", - action="store_true", - default=False, - help="run tests faster by disabling extra checks", - ) - parser.addoption( - "--aiohttp-loop", - action="store", - default="pyloop", - help="run tests with specific loop: pyloop, uvloop or all", - ) - parser.addoption( - "--aiohttp-enable-loop-debug", - action="store_true", - default=False, - help="enable event loop debug 
mode", - ) - - -def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] - """Set up pytest fixture. - - Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. - """ - func = fixturedef.func - - if isasyncgenfunction(func): - # async generator fixture - is_async_gen = True - elif asyncio.iscoroutinefunction(func): - # regular async fixture - is_async_gen = False - else: - # not an async fixture, nothing to do - return - - strip_request = False - if "request" not in fixturedef.argnames: - fixturedef.argnames += ("request",) - strip_request = True - - def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] - request = kwargs["request"] - if strip_request: - del kwargs["request"] - - # if neither the fixture nor the test use the 'loop' fixture, - # 'getfixturevalue' will fail because the test is not parameterized - # (this can be removed someday if 'loop' is no longer parameterized) - if "loop" not in request.fixturenames: - raise Exception( - "Asynchronous fixtures must depend on the 'loop' fixture or " - "be used in tests depending from it." - ) - - _loop = request.getfixturevalue("loop") - - if is_async_gen: - # for async generators, we need to advance the generator once, - # then advance it again in a finalizer - gen = func(*args, **kwargs) - - def finalizer(): # type: ignore[no-untyped-def] - try: - return _loop.run_until_complete(gen.__anext__()) - except StopAsyncIteration: - pass - - request.addfinalizer(finalizer) - return _loop.run_until_complete(gen.__anext__()) - else: - return _loop.run_until_complete(func(*args, **kwargs)) - - fixturedef.func = wrapper - - -@pytest.fixture -def fast(request): # type: ignore[no-untyped-def] - """--fast config option""" - return request.config.getoption("--aiohttp-fast") - - -@pytest.fixture -def loop_debug(request): # type: ignore[no-untyped-def] - """--enable-loop-debug config option""" - return request.config.getoption("--aiohttp-enable-loop-debug") - - -@contextlib.contextmanager -def _runtime_warning_context(): # type: ignore[no-untyped-def] - """Context manager which checks for RuntimeWarnings. - - This exists specifically to - avoid "coroutine 'X' was never awaited" warnings being missed. - - If RuntimeWarnings occur in the context a RuntimeError is raised. - """ - with warnings.catch_warnings(record=True) as _warnings: - yield - rw = [ - "{w.filename}:{w.lineno}:{w.message}".format(w=w) - for w in _warnings - if w.category == RuntimeWarning - ] - if rw: - raise RuntimeError( - "{} Runtime Warning{},\n{}".format( - len(rw), "" if len(rw) == 1 else "s", "\n".join(rw) - ) - ) - - -@contextlib.contextmanager -def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] - """Passthrough loop context. - - Sets up and tears down a loop unless one is passed in via the loop - argument when it's passed straight through. 
- """ - if loop: - # loop already exists, pass it straight through - yield loop - else: - # this shadows loop_context's standard behavior - loop = setup_test_loop() - yield loop - teardown_test_loop(loop, fast=fast) - - -def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] - """Fix pytest collecting for coroutines.""" - if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): - return list(collector._genfunctions(name, obj)) - - -def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] - """Run coroutines in an event loop instead of a normal function call.""" - fast = pyfuncitem.config.getoption("--aiohttp-fast") - if asyncio.iscoroutinefunction(pyfuncitem.function): - existing_loop = pyfuncitem.funcargs.get( - "proactor_loop" - ) or pyfuncitem.funcargs.get("loop", None) - with _runtime_warning_context(): - with _passthrough_loop_context(existing_loop, fast=fast) as _loop: - testargs = { - arg: pyfuncitem.funcargs[arg] - for arg in pyfuncitem._fixtureinfo.argnames - } - _loop.run_until_complete(pyfuncitem.obj(**testargs)) - - return True - - -def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] - if "loop_factory" not in metafunc.fixturenames: - return - - loops = metafunc.config.option.aiohttp_loop - avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]] - avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} - - if uvloop is not None: # pragma: no cover - avail_factories["uvloop"] = uvloop.EventLoopPolicy - - if loops == "all": - loops = "pyloop,uvloop?" - - factories = {} # type: ignore[var-annotated] - for name in loops.split(","): - required = not name.endswith("?") - name = name.strip(" ?") - if name not in avail_factories: # pragma: no cover - if required: - raise ValueError( - "Unknown loop '%s', available loops: %s" - % (name, list(factories.keys())) - ) - else: - continue - factories[name] = avail_factories[name] - metafunc.parametrize( - "loop_factory", list(factories.values()), ids=list(factories.keys()) - ) - - -@pytest.fixture -def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] - """Return an instance of the event loop.""" - policy = loop_factory() - asyncio.set_event_loop_policy(policy) - with loop_context(fast=fast) as _loop: - if loop_debug: - _loop.set_debug(True) # pragma: no cover - asyncio.set_event_loop(_loop) - yield _loop - - -@pytest.fixture -def proactor_loop(): # type: ignore[no-untyped-def] - policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] - asyncio.set_event_loop_policy(policy) - - with loop_context(policy.new_event_loop) as _loop: - asyncio.set_event_loop(_loop) - yield _loop - - -@pytest.fixture -def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]: - warnings.warn( - "Deprecated, use aiohttp_unused_port fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_unused_port - - -@pytest.fixture -def aiohttp_unused_port() -> Callable[[], int]: - """Return a port that is unused on the current host.""" - return _unused_port - - -@pytest.fixture -def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: - """Factory to create a TestServer instance, given an app. 
- - aiohttp_server(app, **kwargs) - """ - servers = [] - - async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def] - server = TestServer(app, port=port) - await server.start_server(loop=loop, **kwargs) - servers.append(server) - return server - - yield go - - async def finalize() -> None: - while servers: - await servers.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_server fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_server - - -@pytest.fixture -def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]: - """Factory to create a RawTestServer instance, given a web handler. - - aiohttp_raw_server(handler, **kwargs) - """ - servers = [] - - async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def] - server = RawTestServer(handler, port=port) - await server.start_server(loop=loop, **kwargs) - servers.append(server) - return server - - yield go - - async def finalize() -> None: - while servers: - await servers.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover - aiohttp_raw_server, -): - warnings.warn( - "Deprecated, use aiohttp_raw_server fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_raw_server - - -@pytest.fixture -def aiohttp_client( - loop: asyncio.AbstractEventLoop, -) -> Iterator[AiohttpClient]: - """Factory to create a TestClient instance. - - aiohttp_client(app, **kwargs) - aiohttp_client(server, **kwargs) - aiohttp_client(raw_server, **kwargs) - """ - clients = [] - - async def go( - __param: Union[Application, BaseTestServer], - *args: Any, - server_kwargs: Optional[Dict[str, Any]] = None, - **kwargs: Any - ) -> TestClient: - - if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] - __param, (Application, BaseTestServer) - ): - __param = __param(loop, *args, **kwargs) - kwargs = {} - else: - assert not args, "args should be empty" - - if isinstance(__param, Application): - server_kwargs = server_kwargs or {} - server = TestServer(__param, loop=loop, **server_kwargs) - client = TestClient(server, loop=loop, **kwargs) - elif isinstance(__param, BaseTestServer): - client = TestClient(__param, loop=loop, **kwargs) - else: - raise ValueError("Unknown argument type: %r" % type(__param)) - - await client.start_server() - clients.append(client) - return client - - yield go - - async def finalize() -> None: - while clients: - await clients.pop().close() - - loop.run_until_complete(finalize()) - - -@pytest.fixture -def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover - warnings.warn( - "Deprecated, use aiohttp_client fixture instead", - DeprecationWarning, - stacklevel=2, - ) - return aiohttp_client diff --git a/.venv/Lib/site-packages/aiohttp/resolver.py b/.venv/Lib/site-packages/aiohttp/resolver.py deleted file mode 100644 index 6c17b1e..0000000 --- a/.venv/Lib/site-packages/aiohttp/resolver.py +++ /dev/null @@ -1,160 +0,0 @@ -import asyncio -import socket -from typing import Any, Dict, List, Optional, Type, Union - -from .abc import AbstractResolver -from .helpers import get_running_loop - -__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") - -try: - import aiodns - - # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') -except 
ImportError: # pragma: no cover - aiodns = None - -aiodns_default = False - - -class ThreadedResolver(AbstractResolver): - """Threaded resolver. - - Uses an Executor for synchronous getaddrinfo() calls. - concurrent.futures.ThreadPoolExecutor is used by default. - """ - - def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: - self._loop = get_running_loop(loop) - - async def resolve( - self, hostname: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - infos = await self._loop.getaddrinfo( - hostname, - port, - type=socket.SOCK_STREAM, - family=family, - flags=socket.AI_ADDRCONFIG, - ) - - hosts = [] - for family, _, proto, _, address in infos: - if family == socket.AF_INET6: - if len(address) < 3: - # IPv6 is not supported by Python build, - # or IPv6 is not enabled in the host - continue - if address[3]: - # This is essential for link-local IPv6 addresses. - # LL IPv6 is a VERY rare case. Strictly speaking, we should use - # getnameinfo() unconditionally, but performance makes sense. - host, _port = socket.getnameinfo( - address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV - ) - port = int(_port) - else: - host, port = address[:2] - else: # IPv4 - assert family == socket.AF_INET - host, port = address # type: ignore[misc] - hosts.append( - { - "hostname": hostname, - "host": host, - "port": port, - "family": family, - "proto": proto, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } - ) - - return hosts - - async def close(self) -> None: - pass - - -class AsyncResolver(AbstractResolver): - """Use the `aiodns` package to make asynchronous DNS lookups""" - - def __init__( - self, - loop: Optional[asyncio.AbstractEventLoop] = None, - *args: Any, - **kwargs: Any - ) -> None: - if aiodns is None: - raise RuntimeError("Resolver requires aiodns library") - - self._loop = get_running_loop(loop) - self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) - - if not hasattr(self._resolver, "gethostbyname"): - # aiodns 1.1 is not available, fallback to DNSResolver.query - self.resolve = self._resolve_with_query # type: ignore - - async def resolve( - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - try: - resp = await self._resolver.gethostbyname(host, family) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - hosts = [] - for address in resp.addresses: - hosts.append( - { - "hostname": host, - "host": address, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } - ) - - if not hosts: - raise OSError("DNS lookup failed") - - return hosts - - async def _resolve_with_query( - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: - if family == socket.AF_INET6: - qtype = "AAAA" - else: - qtype = "A" - - try: - resp = await self._resolver.query(host, qtype) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - - hosts = [] - for rr in resp: - hosts.append( - { - "hostname": host, - "host": rr.host, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST, - } - ) - - if not hosts: - raise OSError("DNS lookup failed") - - return hosts - - async def close(self) -> None: - self._resolver.cancel() - - -_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] -DefaultResolver: _DefaultType = 
AsyncResolver if aiodns_default else ThreadedResolver diff --git a/.venv/Lib/site-packages/aiohttp/streams.py b/.venv/Lib/site-packages/aiohttp/streams.py deleted file mode 100644 index 3e4c355..0000000 --- a/.venv/Lib/site-packages/aiohttp/streams.py +++ /dev/null @@ -1,666 +0,0 @@ -import asyncio -import collections -import warnings -from typing import ( - Awaitable, - Callable, - Deque, - Final, - Generic, - List, - Optional, - Tuple, - TypeVar, -) - -from .base_protocol import BaseProtocol -from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result -from .log import internal_logger - -__all__ = ( - "EMPTY_PAYLOAD", - "EofStream", - "StreamReader", - "DataQueue", - "FlowControlDataQueue", -) - -_T = TypeVar("_T") - - -class EofStream(Exception): - """eof stream indication.""" - - -class AsyncStreamIterator(Generic[_T]): - def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: - self.read_func = read_func - - def __aiter__(self) -> "AsyncStreamIterator[_T]": - return self - - async def __anext__(self) -> _T: - try: - rv = await self.read_func() - except EofStream: - raise StopAsyncIteration - if rv == b"": - raise StopAsyncIteration - return rv - - -class ChunkTupleAsyncStreamIterator: - def __init__(self, stream: "StreamReader") -> None: - self._stream = stream - - def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": - return self - - async def __anext__(self) -> Tuple[bytes, bool]: - rv = await self._stream.readchunk() - if rv == (b"", False): - raise StopAsyncIteration - return rv - - -class AsyncStreamReaderMixin: - def __aiter__(self) -> AsyncStreamIterator[bytes]: - return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] - - def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: - """Returns an asynchronous iterator that yields chunks of size n.""" - return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined] - - def iter_any(self) -> AsyncStreamIterator[bytes]: - """Yield all available data as soon as it is received.""" - return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] - - def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: - """Yield chunks of data as they are received by the server. - - The yielded objects are tuples - of (bytes, bool) as returned by the StreamReader.readchunk method. - """ - return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] - - -class StreamReader(AsyncStreamReaderMixin): - """An enhancement of asyncio.StreamReader. - - Supports asynchronous iteration by line, chunk or as available:: - - async for line in reader: - ... - async for chunk in reader.iter_chunked(1024): - ... - async for slice in reader.iter_any(): - ... 
- - """ - - total_bytes = 0 - - def __init__( - self, - protocol: BaseProtocol, - limit: int, - *, - timer: Optional[BaseTimerContext] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - ) -> None: - self._protocol = protocol - self._low_water = limit - self._high_water = limit * 2 - if loop is None: - loop = asyncio.get_event_loop() - self._loop = loop - self._size = 0 - self._cursor = 0 - self._http_chunk_splits: Optional[List[int]] = None - self._buffer: Deque[bytes] = collections.deque() - self._buffer_offset = 0 - self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._eof_waiter: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None - self._timer = TimerNoop() if timer is None else timer - self._eof_callbacks: List[Callable[[], None]] = [] - - def __repr__(self) -> str: - info = [self.__class__.__name__] - if self._size: - info.append("%d bytes" % self._size) - if self._eof: - info.append("eof") - if self._low_water != 2**16: # default limit - info.append("low=%d high=%d" % (self._low_water, self._high_water)) - if self._waiter: - info.append("w=%r" % self._waiter) - if self._exception: - info.append("e=%r" % self._exception) - return "<%s>" % " ".join(info) - - def get_read_buffer_limits(self) -> Tuple[int, int]: - return (self._low_water, self._high_water) - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception(self, exc: BaseException) -> None: - self._exception = exc - self._eof_callbacks.clear() - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_exception(waiter, exc) - - waiter = self._eof_waiter - if waiter is not None: - self._eof_waiter = None - set_exception(waiter, exc) - - def on_eof(self, callback: Callable[[], None]) -> None: - if self._eof: - try: - callback() - except Exception: - internal_logger.exception("Exception in eof callback") - else: - self._eof_callbacks.append(callback) - - def feed_eof(self) -> None: - self._eof = True - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - waiter = self._eof_waiter - if waiter is not None: - self._eof_waiter = None - set_result(waiter, None) - - for cb in self._eof_callbacks: - try: - cb() - except Exception: - internal_logger.exception("Exception in eof callback") - - self._eof_callbacks.clear() - - def is_eof(self) -> bool: - """Return True if 'feed_eof' was called.""" - return self._eof - - def at_eof(self) -> bool: - """Return True if the buffer is empty and 'feed_eof' was called.""" - return self._eof and not self._buffer - - async def wait_eof(self) -> None: - if self._eof: - return - - assert self._eof_waiter is None - self._eof_waiter = self._loop.create_future() - try: - await self._eof_waiter - finally: - self._eof_waiter = None - - def unread_data(self, data: bytes) -> None: - """rollback reading some data from stream, inserting it to buffer head.""" - warnings.warn( - "unread_data() is deprecated " - "and will be removed in future releases (#3260)", - DeprecationWarning, - stacklevel=2, - ) - if not data: - return - - if self._buffer_offset: - self._buffer[0] = self._buffer[0][self._buffer_offset :] - self._buffer_offset = 0 - self._size += len(data) - self._cursor -= len(data) - self._buffer.appendleft(data) - self._eof_counter = 0 - - # TODO: size is ignored, remove the param later - def feed_data(self, data: bytes, size: int = 0) -> None: - assert not self._eof, "feed_data after feed_eof" - - if not data: - return - - 
self._size += len(data) - self._buffer.append(data) - self.total_bytes += len(data) - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - if self._size > self._high_water and not self._protocol._reading_paused: - self._protocol.pause_reading() - - def begin_http_chunk_receiving(self) -> None: - if self._http_chunk_splits is None: - if self.total_bytes: - raise RuntimeError( - "Called begin_http_chunk_receiving when" "some data was already fed" - ) - self._http_chunk_splits = [] - - def end_http_chunk_receiving(self) -> None: - if self._http_chunk_splits is None: - raise RuntimeError( - "Called end_chunk_receiving without calling " - "begin_chunk_receiving first" - ) - - # self._http_chunk_splits contains logical byte offsets from start of - # the body transfer. Each offset is the offset of the end of a chunk. - # "Logical" means bytes, accessible for a user. - # If no chunks containing logical data were received, current position - # is difinitely zero. - pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0 - - if self.total_bytes == pos: - # We should not add empty chunks here. So we check for that. - # Note, when chunked + gzip is used, we can receive a chunk - # of compressed data, but that data may not be enough for gzip FSM - # to yield any uncompressed data. That's why current position may - # not change after receiving a chunk. - return - - self._http_chunk_splits.append(self.total_bytes) - - # wake up readchunk when end of http chunk received - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - async def _wait(self, func_name: str) -> None: - # StreamReader uses a future to link the protocol feed_data() method - # to a read coroutine. Running two read coroutines at the same time - # would have an unexpected behaviour. It would not possible to know - # which coroutine would get the next data. - if self._waiter is not None: - raise RuntimeError( - "%s() called while another coroutine is " - "already waiting for incoming data" % func_name - ) - - waiter = self._waiter = self._loop.create_future() - try: - with self._timer: - await waiter - finally: - self._waiter = None - - async def readline(self) -> bytes: - return await self.readuntil() - - async def readuntil(self, separator: bytes = b"\n") -> bytes: - seplen = len(separator) - if seplen == 0: - raise ValueError("Separator should be at least one-byte string") - - if self._exception is not None: - raise self._exception - - chunk = b"" - chunk_size = 0 - not_enough = True - - while not_enough: - while self._buffer and not_enough: - offset = self._buffer_offset - ichar = self._buffer[0].find(separator, offset) + 1 - # Read from current offset to found separator or to the end. - data = self._read_nowait_chunk( - ichar - offset + seplen - 1 if ichar else -1 - ) - chunk += data - chunk_size += len(data) - if ichar: - not_enough = False - - if chunk_size > self._high_water: - raise ValueError("Chunk too big") - - if self._eof: - break - - if not_enough: - await self._wait("readuntil") - - return chunk - - async def read(self, n: int = -1) -> bytes: - if self._exception is not None: - raise self._exception - - # migration problem; with DataQueue you have to catch - # EofStream exception, so common way is to run payload.read() inside - # infinite loop. what can cause real infinite loop with StreamReader - # lets keep this code one major release. 
- if __debug__: - if self._eof and not self._buffer: - self._eof_counter = getattr(self, "_eof_counter", 0) + 1 - if self._eof_counter > 5: - internal_logger.warning( - "Multiple access to StreamReader in eof state, " - "might be infinite loop.", - stack_info=True, - ) - - if not n: - return b"" - - if n < 0: - # This used to just loop creating a new waiter hoping to - # collect everything in self._buffer, but that would - # deadlock if the subprocess sends more than self.limit - # bytes. So just call self.readany() until EOF. - blocks = [] - while True: - block = await self.readany() - if not block: - break - blocks.append(block) - return b"".join(blocks) - - # TODO: should be `if` instead of `while` - # because waiter maybe triggered on chunk end, - # without feeding any data - while not self._buffer and not self._eof: - await self._wait("read") - - return self._read_nowait(n) - - async def readany(self) -> bytes: - if self._exception is not None: - raise self._exception - - # TODO: should be `if` instead of `while` - # because waiter maybe triggered on chunk end, - # without feeding any data - while not self._buffer and not self._eof: - await self._wait("readany") - - return self._read_nowait(-1) - - async def readchunk(self) -> Tuple[bytes, bool]: - """Returns a tuple of (data, end_of_http_chunk). - - When chunked transfer - encoding is used, end_of_http_chunk is a boolean indicating if the end - of the data corresponds to the end of a HTTP chunk , otherwise it is - always False. - """ - while True: - if self._exception is not None: - raise self._exception - - while self._http_chunk_splits: - pos = self._http_chunk_splits.pop(0) - if pos == self._cursor: - return (b"", True) - if pos > self._cursor: - return (self._read_nowait(pos - self._cursor), True) - internal_logger.warning( - "Skipping HTTP chunk end due to data " - "consumption beyond chunk boundary" - ) - - if self._buffer: - return (self._read_nowait_chunk(-1), False) - # return (self._read_nowait(-1), False) - - if self._eof: - # Special case for signifying EOF. - # (b'', True) is not a final return value actually. - return (b"", False) - - await self._wait("readchunk") - - async def readexactly(self, n: int) -> bytes: - if self._exception is not None: - raise self._exception - - blocks: List[bytes] = [] - while n > 0: - block = await self.read(n) - if not block: - partial = b"".join(blocks) - raise asyncio.IncompleteReadError(partial, len(partial) + n) - blocks.append(block) - n -= len(block) - - return b"".join(blocks) - - def read_nowait(self, n: int = -1) -> bytes: - # default was changed to be consistent with .read(-1) - # - # I believe the most users don't know about the method and - # they are not affected. - if self._exception is not None: - raise self._exception - - if self._waiter and not self._waiter.done(): - raise RuntimeError( - "Called while some coroutine is waiting for incoming data." 
- ) - - return self._read_nowait(n) - - def _read_nowait_chunk(self, n: int) -> bytes: - first_buffer = self._buffer[0] - offset = self._buffer_offset - if n != -1 and len(first_buffer) - offset > n: - data = first_buffer[offset : offset + n] - self._buffer_offset += n - - elif offset: - self._buffer.popleft() - data = first_buffer[offset:] - self._buffer_offset = 0 - - else: - data = self._buffer.popleft() - - self._size -= len(data) - self._cursor += len(data) - - chunk_splits = self._http_chunk_splits - # Prevent memory leak: drop useless chunk splits - while chunk_splits and chunk_splits[0] < self._cursor: - chunk_splits.pop(0) - - if self._size < self._low_water and self._protocol._reading_paused: - self._protocol.resume_reading() - return data - - def _read_nowait(self, n: int) -> bytes: - """Read not more than n bytes, or whole buffer if n == -1""" - self._timer.assert_timeout() - - chunks = [] - while self._buffer: - chunk = self._read_nowait_chunk(n) - chunks.append(chunk) - if n != -1: - n -= len(chunk) - if n == 0: - break - - return b"".join(chunks) if chunks else b"" - - -class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] - def __init__(self) -> None: - self._read_eof_chunk = False - - def __repr__(self) -> str: - return "<%s>" % self.__class__.__name__ - - def exception(self) -> Optional[BaseException]: - return None - - def set_exception(self, exc: BaseException) -> None: - pass - - def on_eof(self, callback: Callable[[], None]) -> None: - try: - callback() - except Exception: - internal_logger.exception("Exception in eof callback") - - def feed_eof(self) -> None: - pass - - def is_eof(self) -> bool: - return True - - def at_eof(self) -> bool: - return True - - async def wait_eof(self) -> None: - return - - def feed_data(self, data: bytes, n: int = 0) -> None: - pass - - async def readline(self) -> bytes: - return b"" - - async def read(self, n: int = -1) -> bytes: - return b"" - - # TODO add async def readuntil - - async def readany(self) -> bytes: - return b"" - - async def readchunk(self) -> Tuple[bytes, bool]: - if not self._read_eof_chunk: - self._read_eof_chunk = True - return (b"", False) - - return (b"", True) - - async def readexactly(self, n: int) -> bytes: - raise asyncio.IncompleteReadError(b"", n) - - def read_nowait(self, n: int = -1) -> bytes: - return b"" - - -EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() - - -class DataQueue(Generic[_T]): - """DataQueue is a general-purpose blocking queue with one reader.""" - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - self._loop = loop - self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None - self._size = 0 - self._buffer: Deque[Tuple[_T, int]] = collections.deque() - - def __len__(self) -> int: - return len(self._buffer) - - def is_eof(self) -> bool: - return self._eof - - def at_eof(self) -> bool: - return self._eof and not self._buffer - - def exception(self) -> Optional[BaseException]: - return self._exception - - def set_exception(self, exc: BaseException) -> None: - self._eof = True - self._exception = exc - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_exception(waiter, exc) - - def feed_data(self, data: _T, size: int = 0) -> None: - self._size += size - self._buffer.append((data, size)) - - waiter = self._waiter - if waiter is not None: - self._waiter = None - set_result(waiter, None) - - def feed_eof(self) -> None: - self._eof = True - - waiter = self._waiter - if 
waiter is not None: - self._waiter = None - set_result(waiter, None) - - async def read(self) -> _T: - if not self._buffer and not self._eof: - assert not self._waiter - self._waiter = self._loop.create_future() - try: - await self._waiter - except (asyncio.CancelledError, asyncio.TimeoutError): - self._waiter = None - raise - - if self._buffer: - data, size = self._buffer.popleft() - self._size -= size - return data - else: - if self._exception is not None: - raise self._exception - else: - raise EofStream - - def __aiter__(self) -> AsyncStreamIterator[_T]: - return AsyncStreamIterator(self.read) - - -class FlowControlDataQueue(DataQueue[_T]): - """FlowControlDataQueue resumes and pauses an underlying stream. - - It is a destination for parsed data. - """ - - def __init__( - self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop - ) -> None: - super().__init__(loop=loop) - - self._protocol = protocol - self._limit = limit * 2 - - def feed_data(self, data: _T, size: int = 0) -> None: - super().feed_data(data, size) - - if self._size > self._limit and not self._protocol._reading_paused: - self._protocol.pause_reading() - - async def read(self) -> _T: - try: - return await super().read() - finally: - if self._size < self._limit and self._protocol._reading_paused: - self._protocol.resume_reading() diff --git a/.venv/Lib/site-packages/aiohttp/tcp_helpers.py b/.venv/Lib/site-packages/aiohttp/tcp_helpers.py deleted file mode 100644 index 88b2442..0000000 --- a/.venv/Lib/site-packages/aiohttp/tcp_helpers.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Helper methods to tune a TCP connection""" - -import asyncio -import socket -from contextlib import suppress -from typing import Optional # noqa - -__all__ = ("tcp_keepalive", "tcp_nodelay") - - -if hasattr(socket, "SO_KEEPALIVE"): - - def tcp_keepalive(transport: asyncio.Transport) -> None: - sock = transport.get_extra_info("socket") - if sock is not None: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - -else: - - def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover - pass - - -def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None: - sock = transport.get_extra_info("socket") - - if sock is None: - return - - if sock.family not in (socket.AF_INET, socket.AF_INET6): - return - - value = bool(value) - - # socket may be closed already, on windows OSError get raised - with suppress(OSError): - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value) diff --git a/.venv/Lib/site-packages/aiohttp/test_utils.py b/.venv/Lib/site-packages/aiohttp/test_utils.py deleted file mode 100644 index b5821a7..0000000 --- a/.venv/Lib/site-packages/aiohttp/test_utils.py +++ /dev/null @@ -1,675 +0,0 @@ -"""Utilities shared by tests.""" - -import asyncio -import contextlib -import gc -import inspect -import ipaddress -import os -import socket -import sys -import warnings -from abc import ABC, abstractmethod -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Iterator, - List, - Optional, - Type, - Union, - cast, -) -from unittest import IsolatedAsyncioTestCase, mock - -from aiosignal import Signal -from multidict import CIMultiDict, CIMultiDictProxy -from yarl import URL - -import aiohttp -from aiohttp.client import _RequestContextManager, _WSRequestContextManager - -from . 
import ClientSession, hdrs -from .abc import AbstractCookieJar -from .client_reqrep import ClientResponse -from .client_ws import ClientWebSocketResponse -from .helpers import sentinel -from .http import HttpVersion, RawRequestMessage -from .typedefs import StrOrURL -from .web import ( - Application, - AppRunner, - BaseRunner, - Request, - Server, - ServerRunner, - SockSite, - UrlMappingMatchInfo, -) -from .web_protocol import _RequestHandler - -if TYPE_CHECKING: - from ssl import SSLContext -else: - SSLContext = None - -REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" - - -def get_unused_port_socket( - host: str, family: socket.AddressFamily = socket.AF_INET -) -> socket.socket: - return get_port_socket(host, 0, family) - - -def get_port_socket( - host: str, port: int, family: socket.AddressFamily -) -> socket.socket: - s = socket.socket(family, socket.SOCK_STREAM) - if REUSE_ADDRESS: - # Windows has different semantics for SO_REUSEADDR, - # so don't set it. Ref: - # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind((host, port)) - return s - - -def unused_port() -> int: - """Return a port that is unused on the current host.""" - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("127.0.0.1", 0)) - return cast(int, s.getsockname()[1]) - - -class BaseTestServer(ABC): - __test__ = False - - def __init__( - self, - *, - scheme: Union[str, object] = sentinel, - loop: Optional[asyncio.AbstractEventLoop] = None, - host: str = "127.0.0.1", - port: Optional[int] = None, - skip_url_asserts: bool = False, - socket_factory: Callable[ - [str, int, socket.AddressFamily], socket.socket - ] = get_port_socket, - **kwargs: Any, - ) -> None: - self._loop = loop - self.runner: Optional[BaseRunner] = None - self._root: Optional[URL] = None - self.host = host - self.port = port - self._closed = False - self.scheme = scheme - self.skip_url_asserts = skip_url_asserts - self.socket_factory = socket_factory - - async def start_server( - self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any - ) -> None: - if self.runner: - return - self._loop = loop - self._ssl = kwargs.pop("ssl", None) - self.runner = await self._make_runner(handler_cancellation=True, **kwargs) - await self.runner.setup() - if not self.port: - self.port = 0 - try: - version = ipaddress.ip_address(self.host).version - except ValueError: - version = 4 - family = socket.AF_INET6 if version == 6 else socket.AF_INET - _sock = self.socket_factory(self.host, self.port, family) - self.host, self.port = _sock.getsockname()[:2] - site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) - await site.start() - server = site._server - assert server is not None - sockets = server.sockets # type: ignore[attr-defined] - assert sockets is not None - self.port = sockets[0].getsockname()[1] - if self.scheme is sentinel: - if self._ssl: - scheme = "https" - else: - scheme = "http" - self.scheme = scheme - self._root = URL(f"{self.scheme}://{self.host}:{self.port}") - - @abstractmethod # pragma: no cover - async def _make_runner(self, **kwargs: Any) -> BaseRunner: - pass - - def make_url(self, path: StrOrURL) -> URL: - assert self._root is not None - url = URL(path) - if not self.skip_url_asserts: - assert not url.is_absolute() - return self._root.join(url) - else: - return URL(str(self._root) + str(path)) - - @property - def started(self) -> bool: - return self.runner is not None - - @property - 
def closed(self) -> bool: - return self._closed - - @property - def handler(self) -> Server: - # for backward compatibility - # web.Server instance - runner = self.runner - assert runner is not None - assert runner.server is not None - return runner.server - - async def close(self) -> None: - """Close all fixtures created by the test client. - - After that point, the TestClient is no longer usable. - - This is an idempotent function: running close multiple times - will not have any additional effects. - - close is also run when the object is garbage collected, and on - exit when used as a context manager. - - """ - if self.started and not self.closed: - assert self.runner is not None - await self.runner.cleanup() - self._root = None - self.port = None - self._closed = True - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "BaseTestServer": - await self.start_server(loop=self._loop) - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - await self.close() - - -class TestServer(BaseTestServer): - def __init__( - self, - app: Application, - *, - scheme: Union[str, object] = sentinel, - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ): - self.app = app - super().__init__(scheme=scheme, host=host, port=port, **kwargs) - - async def _make_runner(self, **kwargs: Any) -> BaseRunner: - return AppRunner(self.app, **kwargs) - - -class RawTestServer(BaseTestServer): - def __init__( - self, - handler: _RequestHandler, - *, - scheme: Union[str, object] = sentinel, - host: str = "127.0.0.1", - port: Optional[int] = None, - **kwargs: Any, - ) -> None: - self._handler = handler - super().__init__(scheme=scheme, host=host, port=port, **kwargs) - - async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: - srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) - return ServerRunner(srv, debug=debug, **kwargs) - - -class TestClient: - """ - A test client implementation. - - To write functional tests for aiohttp based servers. 
- - """ - - __test__ = False - - def __init__( - self, - server: BaseTestServer, - *, - cookie_jar: Optional[AbstractCookieJar] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any, - ) -> None: - if not isinstance(server, BaseTestServer): - raise TypeError( - "server must be TestServer " "instance, found type: %r" % type(server) - ) - self._server = server - self._loop = loop - if cookie_jar is None: - cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) - self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) - self._closed = False - self._responses: List[ClientResponse] = [] - self._websockets: List[ClientWebSocketResponse] = [] - - async def start_server(self) -> None: - await self._server.start_server(loop=self._loop) - - @property - def host(self) -> str: - return self._server.host - - @property - def port(self) -> Optional[int]: - return self._server.port - - @property - def server(self) -> BaseTestServer: - return self._server - - @property - def app(self) -> Optional[Application]: - return cast(Optional[Application], getattr(self._server, "app", None)) - - @property - def session(self) -> ClientSession: - """An internal aiohttp.ClientSession. - - Unlike the methods on the TestClient, client session requests - do not automatically include the host in the url queried, and - will require an absolute path to the resource. - - """ - return self._session - - def make_url(self, path: StrOrURL) -> URL: - return self._server.make_url(path) - - async def _request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> ClientResponse: - resp = await self._session.request(method, self.make_url(path), **kwargs) - # save it to close later - self._responses.append(resp) - return resp - - def request( - self, method: str, path: StrOrURL, **kwargs: Any - ) -> _RequestContextManager: - """Routes a request to tested http server. - - The interface is identical to aiohttp.ClientSession.request, - except the loop kwarg is overridden by the instance used by the - test server. 
- - """ - return _RequestContextManager(self._request(method, path, **kwargs)) - - def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP GET request.""" - return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs)) - - def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP POST request.""" - return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs)) - - def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP OPTIONS request.""" - return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs)) - - def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP HEAD request.""" - return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs)) - - def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PUT request.""" - return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs)) - - def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs)) - - def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager: - """Perform an HTTP PATCH request.""" - return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs)) - - def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager: - """Initiate websocket connection. - - The api corresponds to aiohttp.ClientSession.ws_connect. - - """ - return _WSRequestContextManager(self._ws_connect(path, **kwargs)) - - async def _ws_connect( - self, path: StrOrURL, **kwargs: Any - ) -> ClientWebSocketResponse: - ws = await self._session.ws_connect(self.make_url(path), **kwargs) - self._websockets.append(ws) - return ws - - async def close(self) -> None: - """Close all fixtures created by the test client. - - After that point, the TestClient is no longer usable. - - This is an idempotent function: running close multiple times - will not have any additional effects. - - close is also run on exit when used as a(n) (asynchronous) - context manager. - - """ - if not self._closed: - for resp in self._responses: - resp.close() - for ws in self._websockets: - await ws.close() - await self._session.close() - await self._server.close() - self._closed = True - - def __enter__(self) -> None: - raise TypeError("Use async with instead") - - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - # __exit__ should exist in pair with __enter__ but never executed - pass # pragma: no cover - - async def __aenter__(self) -> "TestClient": - await self.start_server() - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], - ) -> None: - await self.close() - - -class AioHTTPTestCase(IsolatedAsyncioTestCase): - """A base class to allow for unittest web applications using aiohttp. - - Provides the following: - - * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. - * self.loop (asyncio.BaseEventLoop): the event loop in which the - application and server are running. 
- * self.app (aiohttp.web.Application): the application returned by - self.get_application() - - Note that the TestClient's methods are asynchronous: you have to - execute function on the test client using asynchronous methods. - """ - - async def get_application(self) -> Application: - """Get application. - - This method should be overridden - to return the aiohttp.web.Application - object to test. - """ - return self.get_app() - - def get_app(self) -> Application: - """Obsolete method used to constructing web application. - - Use .get_application() coroutine instead. - """ - raise RuntimeError("Did you forget to define get_application()?") - - async def asyncSetUp(self) -> None: - self.loop = asyncio.get_running_loop() - return await self.setUpAsync() - - async def setUpAsync(self) -> None: - self.app = await self.get_application() - self.server = await self.get_server(self.app) - self.client = await self.get_client(self.server) - - await self.client.start_server() - - async def asyncTearDown(self) -> None: - return await self.tearDownAsync() - - async def tearDownAsync(self) -> None: - await self.client.close() - - async def get_server(self, app: Application) -> TestServer: - """Return a TestServer instance.""" - return TestServer(app, loop=self.loop) - - async def get_client(self, server: TestServer) -> TestClient: - """Return a TestClient instance.""" - return TestClient(server, loop=self.loop) - - -def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: - """ - A decorator dedicated to use with asynchronous AioHTTPTestCase test methods. - - In 3.8+, this does nothing. - """ - warnings.warn( - "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+", - DeprecationWarning, - stacklevel=2, - ) - return func - - -_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] - - -@contextlib.contextmanager -def loop_context( - loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False -) -> Iterator[asyncio.AbstractEventLoop]: - """A contextmanager that creates an event_loop, for test purposes. - - Handles the creation and cleanup of a test loop. - """ - loop = setup_test_loop(loop_factory) - yield loop - teardown_test_loop(loop, fast=fast) - - -def setup_test_loop( - loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, -) -> asyncio.AbstractEventLoop: - """Create and return an asyncio.BaseEventLoop instance. - - The caller should also call teardown_test_loop, - once they are done with the loop. 
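# Illustrative sketch, not part of the deleted aiohttp/test_utils.py: a hedged
# example of the helpers defined above. AioHTTPTestCase builds the
# TestServer/TestClient pair in setUpAsync(), and test methods talk to the app
# through self.client. The handler and route below are made up for the demo.
from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase


class HelloAppTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="hello")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    async def test_hello(self) -> None:
        # self.client is the TestClient created by setUpAsync()
        async with self.client.get("/") as resp:
            assert resp.status == 200
            assert await resp.text() == "hello"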
- """ - loop = loop_factory() - asyncio.set_event_loop(loop) - return loop - - -def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: - """Teardown and cleanup an event_loop created by setup_test_loop.""" - closed = loop.is_closed() - if not closed: - loop.call_soon(loop.stop) - loop.run_forever() - loop.close() - - if not fast: - gc.collect() - - asyncio.set_event_loop(None) - - -def _create_app_mock() -> mock.MagicMock: - def get_dict(app: Any, key: str) -> Any: - return app.__app_dict[key] - - def set_dict(app: Any, key: str, value: Any) -> None: - app.__app_dict[key] = value - - app = mock.MagicMock(spec=Application) - app.__app_dict = {} - app.__getitem__ = get_dict - app.__setitem__ = set_dict - - app._debug = False - app.on_response_prepare = Signal(app) - app.on_response_prepare.freeze() - return app - - -def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: - transport = mock.Mock() - - def get_extra_info(key: str) -> Optional[SSLContext]: - if key == "sslcontext": - return sslcontext - else: - return None - - transport.get_extra_info.side_effect = get_extra_info - return transport - - -def make_mocked_request( - method: str, - path: str, - headers: Any = None, - *, - match_info: Any = sentinel, - version: HttpVersion = HttpVersion(1, 1), - closing: bool = False, - app: Any = None, - writer: Any = sentinel, - protocol: Any = sentinel, - transport: Any = sentinel, - payload: Any = sentinel, - sslcontext: Optional[SSLContext] = None, - client_max_size: int = 1024**2, - loop: Any = ..., -) -> Request: - """Creates mocked web.Request testing purposes. - - Useful in unit tests, when spinning full web server is overkill or - specific conditions and errors are hard to trigger. - """ - task = mock.Mock() - if loop is ...: - loop = mock.Mock() - loop.create_future.return_value = () - - if version < HttpVersion(1, 1): - closing = True - - if headers: - headers = CIMultiDictProxy(CIMultiDict(headers)) - raw_hdrs = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() - ) - else: - headers = CIMultiDictProxy(CIMultiDict()) - raw_hdrs = () - - chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() - - message = RawRequestMessage( - method, - path, - version, - headers, - raw_hdrs, - closing, - None, - False, - chunked, - URL(path), - ) - if app is None: - app = _create_app_mock() - - if transport is sentinel: - transport = _create_transport(sslcontext) - - if protocol is sentinel: - protocol = mock.Mock() - protocol.transport = transport - - if writer is sentinel: - writer = mock.Mock() - writer.write_headers = make_mocked_coro(None) - writer.write = make_mocked_coro(None) - writer.write_eof = make_mocked_coro(None) - writer.drain = make_mocked_coro(None) - writer.transport = transport - - protocol.transport = transport - protocol.writer = writer - - if payload is sentinel: - payload = mock.Mock() - - req = Request( - message, payload, protocol, writer, task, loop, client_max_size=client_max_size - ) - - match_info = UrlMappingMatchInfo( - {} if match_info is sentinel else match_info, mock.Mock() - ) - match_info.add_app(app) - req._match_info = match_info - - return req - - -def make_mocked_coro( - return_value: Any = sentinel, raise_exception: Any = sentinel -) -> Any: - """Creates a coroutine mock.""" - - async def mock_coro(*args: Any, **kwargs: Any) -> Any: - if raise_exception is not sentinel: - raise raise_exception - if not inspect.isawaitable(return_value): - return return_value - await 
return_value - - return mock.Mock(wraps=mock_coro) diff --git a/.venv/Lib/site-packages/aiohttp/tracing.py b/.venv/Lib/site-packages/aiohttp/tracing.py deleted file mode 100644 index 62847a0..0000000 --- a/.venv/Lib/site-packages/aiohttp/tracing.py +++ /dev/null @@ -1,471 +0,0 @@ -from types import SimpleNamespace -from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar - -import attr -from aiosignal import Signal -from multidict import CIMultiDict -from yarl import URL - -from .client_reqrep import ClientResponse - -if TYPE_CHECKING: - from .client import ClientSession - - _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True) - - class _SignalCallback(Protocol[_ParamT_contra]): - def __call__( - self, - __client_session: ClientSession, - __trace_config_ctx: SimpleNamespace, - __params: _ParamT_contra, - ) -> Awaitable[None]: - ... - - -__all__ = ( - "TraceConfig", - "TraceRequestStartParams", - "TraceRequestEndParams", - "TraceRequestExceptionParams", - "TraceConnectionQueuedStartParams", - "TraceConnectionQueuedEndParams", - "TraceConnectionCreateStartParams", - "TraceConnectionCreateEndParams", - "TraceConnectionReuseconnParams", - "TraceDnsResolveHostStartParams", - "TraceDnsResolveHostEndParams", - "TraceDnsCacheHitParams", - "TraceDnsCacheMissParams", - "TraceRequestRedirectParams", - "TraceRequestChunkSentParams", - "TraceResponseChunkReceivedParams", - "TraceRequestHeadersSentParams", -) - - -class TraceConfig: - """First-class used to trace requests launched via ClientSession objects.""" - - def __init__( - self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace - ) -> None: - self._on_request_start: Signal[ - _SignalCallback[TraceRequestStartParams] - ] = Signal(self) - self._on_request_chunk_sent: Signal[ - _SignalCallback[TraceRequestChunkSentParams] - ] = Signal(self) - self._on_response_chunk_received: Signal[ - _SignalCallback[TraceResponseChunkReceivedParams] - ] = Signal(self) - self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal( - self - ) - self._on_request_exception: Signal[ - _SignalCallback[TraceRequestExceptionParams] - ] = Signal(self) - self._on_request_redirect: Signal[ - _SignalCallback[TraceRequestRedirectParams] - ] = Signal(self) - self._on_connection_queued_start: Signal[ - _SignalCallback[TraceConnectionQueuedStartParams] - ] = Signal(self) - self._on_connection_queued_end: Signal[ - _SignalCallback[TraceConnectionQueuedEndParams] - ] = Signal(self) - self._on_connection_create_start: Signal[ - _SignalCallback[TraceConnectionCreateStartParams] - ] = Signal(self) - self._on_connection_create_end: Signal[ - _SignalCallback[TraceConnectionCreateEndParams] - ] = Signal(self) - self._on_connection_reuseconn: Signal[ - _SignalCallback[TraceConnectionReuseconnParams] - ] = Signal(self) - self._on_dns_resolvehost_start: Signal[ - _SignalCallback[TraceDnsResolveHostStartParams] - ] = Signal(self) - self._on_dns_resolvehost_end: Signal[ - _SignalCallback[TraceDnsResolveHostEndParams] - ] = Signal(self) - self._on_dns_cache_hit: Signal[ - _SignalCallback[TraceDnsCacheHitParams] - ] = Signal(self) - self._on_dns_cache_miss: Signal[ - _SignalCallback[TraceDnsCacheMissParams] - ] = Signal(self) - self._on_request_headers_sent: Signal[ - _SignalCallback[TraceRequestHeadersSentParams] - ] = Signal(self) - - self._trace_config_ctx_factory = trace_config_ctx_factory - - def trace_config_ctx( - self, trace_request_ctx: Optional[SimpleNamespace] = None - ) -> SimpleNamespace: - """Return a new 
trace_config_ctx instance""" - return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) - - def freeze(self) -> None: - self._on_request_start.freeze() - self._on_request_chunk_sent.freeze() - self._on_response_chunk_received.freeze() - self._on_request_end.freeze() - self._on_request_exception.freeze() - self._on_request_redirect.freeze() - self._on_connection_queued_start.freeze() - self._on_connection_queued_end.freeze() - self._on_connection_create_start.freeze() - self._on_connection_create_end.freeze() - self._on_connection_reuseconn.freeze() - self._on_dns_resolvehost_start.freeze() - self._on_dns_resolvehost_end.freeze() - self._on_dns_cache_hit.freeze() - self._on_dns_cache_miss.freeze() - self._on_request_headers_sent.freeze() - - @property - def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]": - return self._on_request_start - - @property - def on_request_chunk_sent( - self, - ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]": - return self._on_request_chunk_sent - - @property - def on_response_chunk_received( - self, - ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]": - return self._on_response_chunk_received - - @property - def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]": - return self._on_request_end - - @property - def on_request_exception( - self, - ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]": - return self._on_request_exception - - @property - def on_request_redirect( - self, - ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]": - return self._on_request_redirect - - @property - def on_connection_queued_start( - self, - ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]": - return self._on_connection_queued_start - - @property - def on_connection_queued_end( - self, - ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]": - return self._on_connection_queued_end - - @property - def on_connection_create_start( - self, - ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]": - return self._on_connection_create_start - - @property - def on_connection_create_end( - self, - ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]": - return self._on_connection_create_end - - @property - def on_connection_reuseconn( - self, - ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]": - return self._on_connection_reuseconn - - @property - def on_dns_resolvehost_start( - self, - ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]": - return self._on_dns_resolvehost_start - - @property - def on_dns_resolvehost_end( - self, - ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]": - return self._on_dns_resolvehost_end - - @property - def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]": - return self._on_dns_cache_hit - - @property - def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]": - return self._on_dns_cache_miss - - @property - def on_request_headers_sent( - self, - ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]": - return self._on_request_headers_sent - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestStartParams: - """Parameters sent by the `on_request_start` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestChunkSentParams: - """Parameters sent by the `on_request_chunk_sent` signal""" - - method: str - url: 
URL - chunk: bytes - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceResponseChunkReceivedParams: - """Parameters sent by the `on_response_chunk_received` signal""" - - method: str - url: URL - chunk: bytes - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestEndParams: - """Parameters sent by the `on_request_end` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - response: ClientResponse - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestExceptionParams: - """Parameters sent by the `on_request_exception` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - exception: BaseException - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestRedirectParams: - """Parameters sent by the `on_request_redirect` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - response: ClientResponse - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionQueuedStartParams: - """Parameters sent by the `on_connection_queued_start` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionQueuedEndParams: - """Parameters sent by the `on_connection_queued_end` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionCreateStartParams: - """Parameters sent by the `on_connection_create_start` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionCreateEndParams: - """Parameters sent by the `on_connection_create_end` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceConnectionReuseconnParams: - """Parameters sent by the `on_connection_reuseconn` signal""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsResolveHostStartParams: - """Parameters sent by the `on_dns_resolvehost_start` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsResolveHostEndParams: - """Parameters sent by the `on_dns_resolvehost_end` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsCacheHitParams: - """Parameters sent by the `on_dns_cache_hit` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceDnsCacheMissParams: - """Parameters sent by the `on_dns_cache_miss` signal""" - - host: str - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class TraceRequestHeadersSentParams: - """Parameters sent by the `on_request_headers_sent` signal""" - - method: str - url: URL - headers: "CIMultiDict[str]" - - -class Trace: - """Internal dependency holder class. - - Used to keep together the main dependencies used - at the moment of send a signal. 
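# Illustrative sketch, not part of the deleted aiohttp/tracing.py: how the
# TraceConfig signals declared above are consumed from user code. Callbacks are
# appended to a TraceConfig, which is then passed to ClientSession so the Trace
# helper defined below can fire them. The URL is a placeholder.
import asyncio
from types import SimpleNamespace

import aiohttp


async def on_request_start(
    session: aiohttp.ClientSession,
    ctx: SimpleNamespace,
    params: aiohttp.TraceRequestStartParams,
) -> None:
    # Signal callbacks receive (session, trace_config_ctx, params)
    print("request started:", params.method, params.url)


async def main() -> None:
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com/") as resp:
            await resp.read()


if __name__ == "__main__":
    asyncio.run(main())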
- """ - - def __init__( - self, - session: "ClientSession", - trace_config: TraceConfig, - trace_config_ctx: SimpleNamespace, - ) -> None: - self._trace_config = trace_config - self._trace_config_ctx = trace_config_ctx - self._session = session - - async def send_request_start( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - return await self._trace_config.on_request_start.send( - self._session, - self._trace_config_ctx, - TraceRequestStartParams(method, url, headers), - ) - - async def send_request_chunk_sent( - self, method: str, url: URL, chunk: bytes - ) -> None: - return await self._trace_config.on_request_chunk_sent.send( - self._session, - self._trace_config_ctx, - TraceRequestChunkSentParams(method, url, chunk), - ) - - async def send_response_chunk_received( - self, method: str, url: URL, chunk: bytes - ) -> None: - return await self._trace_config.on_response_chunk_received.send( - self._session, - self._trace_config_ctx, - TraceResponseChunkReceivedParams(method, url, chunk), - ) - - async def send_request_end( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - response: ClientResponse, - ) -> None: - return await self._trace_config.on_request_end.send( - self._session, - self._trace_config_ctx, - TraceRequestEndParams(method, url, headers, response), - ) - - async def send_request_exception( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - exception: BaseException, - ) -> None: - return await self._trace_config.on_request_exception.send( - self._session, - self._trace_config_ctx, - TraceRequestExceptionParams(method, url, headers, exception), - ) - - async def send_request_redirect( - self, - method: str, - url: URL, - headers: "CIMultiDict[str]", - response: ClientResponse, - ) -> None: - return await self._trace_config._on_request_redirect.send( - self._session, - self._trace_config_ctx, - TraceRequestRedirectParams(method, url, headers, response), - ) - - async def send_connection_queued_start(self) -> None: - return await self._trace_config.on_connection_queued_start.send( - self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() - ) - - async def send_connection_queued_end(self) -> None: - return await self._trace_config.on_connection_queued_end.send( - self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() - ) - - async def send_connection_create_start(self) -> None: - return await self._trace_config.on_connection_create_start.send( - self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() - ) - - async def send_connection_create_end(self) -> None: - return await self._trace_config.on_connection_create_end.send( - self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() - ) - - async def send_connection_reuseconn(self) -> None: - return await self._trace_config.on_connection_reuseconn.send( - self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() - ) - - async def send_dns_resolvehost_start(self, host: str) -> None: - return await self._trace_config.on_dns_resolvehost_start.send( - self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) - ) - - async def send_dns_resolvehost_end(self, host: str) -> None: - return await self._trace_config.on_dns_resolvehost_end.send( - self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) - ) - - async def send_dns_cache_hit(self, host: str) -> None: - return await self._trace_config.on_dns_cache_hit.send( - self._session, self._trace_config_ctx, 
TraceDnsCacheHitParams(host) - ) - - async def send_dns_cache_miss(self, host: str) -> None: - return await self._trace_config.on_dns_cache_miss.send( - self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) - ) - - async def send_request_headers( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - return await self._trace_config._on_request_headers_sent.send( - self._session, - self._trace_config_ctx, - TraceRequestHeadersSentParams(method, url, headers), - ) diff --git a/.venv/Lib/site-packages/aiohttp/typedefs.py b/.venv/Lib/site-packages/aiohttp/typedefs.py deleted file mode 100644 index 5e963e1..0000000 --- a/.venv/Lib/site-packages/aiohttp/typedefs.py +++ /dev/null @@ -1,54 +0,0 @@ -import json -import os -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable, - Mapping, - Tuple, - Union, -) - -from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr -from yarl import URL - -DEFAULT_JSON_ENCODER = json.dumps -DEFAULT_JSON_DECODER = json.loads - -if TYPE_CHECKING: - _CIMultiDict = CIMultiDict[str] - _CIMultiDictProxy = CIMultiDictProxy[str] - _MultiDict = MultiDict[str] - _MultiDictProxy = MultiDictProxy[str] - from http.cookies import BaseCookie, Morsel - - from .web import Request, StreamResponse -else: - _CIMultiDict = CIMultiDict - _CIMultiDictProxy = CIMultiDictProxy - _MultiDict = MultiDict - _MultiDictProxy = MultiDictProxy - -Byteish = Union[bytes, bytearray, memoryview] -JSONEncoder = Callable[[Any], str] -JSONDecoder = Callable[[str], Any] -LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy] -RawHeaders = Tuple[Tuple[bytes, bytes], ...] -StrOrURL = Union[str, URL] - -LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] -LooseCookiesIterables = Iterable[ - Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] -] -LooseCookies = Union[ - LooseCookiesMappings, - LooseCookiesIterables, - "BaseCookie[str]", -] - -Handler = Callable[["Request"], Awaitable["StreamResponse"]] -Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]] - -PathLike = Union[str, "os.PathLike[str]"] diff --git a/.venv/Lib/site-packages/aiohttp/web.py b/.venv/Lib/site-packages/aiohttp/web.py deleted file mode 100644 index e911650..0000000 --- a/.venv/Lib/site-packages/aiohttp/web.py +++ /dev/null @@ -1,616 +0,0 @@ -import asyncio -import logging -import os -import socket -import sys -import warnings -from argparse import ArgumentParser -from collections.abc import Iterable -from contextlib import suppress -from functools import partial -from importlib import import_module -from typing import ( - Any, - Awaitable, - Callable, - Iterable as TypingIterable, - List, - Optional, - Set, - Type, - Union, - cast, -) -from weakref import WeakSet - -from .abc import AbstractAccessLogger -from .helpers import AppKey as AppKey -from .log import access_logger -from .typedefs import PathLike -from .web_app import Application as Application, CleanupError as CleanupError -from .web_exceptions import ( - HTTPAccepted as HTTPAccepted, - HTTPBadGateway as HTTPBadGateway, - HTTPBadRequest as HTTPBadRequest, - HTTPClientError as HTTPClientError, - HTTPConflict as HTTPConflict, - HTTPCreated as HTTPCreated, - HTTPError as HTTPError, - HTTPException as HTTPException, - HTTPExpectationFailed as HTTPExpectationFailed, - HTTPFailedDependency as HTTPFailedDependency, - HTTPForbidden as HTTPForbidden, - HTTPFound as HTTPFound, - HTTPGatewayTimeout as 
HTTPGatewayTimeout, - HTTPGone as HTTPGone, - HTTPInsufficientStorage as HTTPInsufficientStorage, - HTTPInternalServerError as HTTPInternalServerError, - HTTPLengthRequired as HTTPLengthRequired, - HTTPMethodNotAllowed as HTTPMethodNotAllowed, - HTTPMisdirectedRequest as HTTPMisdirectedRequest, - HTTPMove as HTTPMove, - HTTPMovedPermanently as HTTPMovedPermanently, - HTTPMultipleChoices as HTTPMultipleChoices, - HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, - HTTPNoContent as HTTPNoContent, - HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, - HTTPNotAcceptable as HTTPNotAcceptable, - HTTPNotExtended as HTTPNotExtended, - HTTPNotFound as HTTPNotFound, - HTTPNotImplemented as HTTPNotImplemented, - HTTPNotModified as HTTPNotModified, - HTTPOk as HTTPOk, - HTTPPartialContent as HTTPPartialContent, - HTTPPaymentRequired as HTTPPaymentRequired, - HTTPPermanentRedirect as HTTPPermanentRedirect, - HTTPPreconditionFailed as HTTPPreconditionFailed, - HTTPPreconditionRequired as HTTPPreconditionRequired, - HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, - HTTPRedirection as HTTPRedirection, - HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, - HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, - HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, - HTTPRequestTimeout as HTTPRequestTimeout, - HTTPRequestURITooLong as HTTPRequestURITooLong, - HTTPResetContent as HTTPResetContent, - HTTPSeeOther as HTTPSeeOther, - HTTPServerError as HTTPServerError, - HTTPServiceUnavailable as HTTPServiceUnavailable, - HTTPSuccessful as HTTPSuccessful, - HTTPTemporaryRedirect as HTTPTemporaryRedirect, - HTTPTooManyRequests as HTTPTooManyRequests, - HTTPUnauthorized as HTTPUnauthorized, - HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, - HTTPUnprocessableEntity as HTTPUnprocessableEntity, - HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, - HTTPUpgradeRequired as HTTPUpgradeRequired, - HTTPUseProxy as HTTPUseProxy, - HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, - HTTPVersionNotSupported as HTTPVersionNotSupported, - NotAppKeyWarning as NotAppKeyWarning, -) -from .web_fileresponse import FileResponse as FileResponse -from .web_log import AccessLogger -from .web_middlewares import ( - middleware as middleware, - normalize_path_middleware as normalize_path_middleware, -) -from .web_protocol import ( - PayloadAccessError as PayloadAccessError, - RequestHandler as RequestHandler, - RequestPayloadError as RequestPayloadError, -) -from .web_request import ( - BaseRequest as BaseRequest, - FileField as FileField, - Request as Request, -) -from .web_response import ( - ContentCoding as ContentCoding, - Response as Response, - StreamResponse as StreamResponse, - json_response as json_response, -) -from .web_routedef import ( - AbstractRouteDef as AbstractRouteDef, - RouteDef as RouteDef, - RouteTableDef as RouteTableDef, - StaticDef as StaticDef, - delete as delete, - get as get, - head as head, - options as options, - patch as patch, - post as post, - put as put, - route as route, - static as static, - view as view, -) -from .web_runner import ( - AppRunner as AppRunner, - BaseRunner as BaseRunner, - BaseSite as BaseSite, - GracefulExit as GracefulExit, - NamedPipeSite as NamedPipeSite, - ServerRunner as ServerRunner, - SockSite as SockSite, - TCPSite as TCPSite, - UnixSite as UnixSite, -) -from .web_server import Server as Server -from .web_urldispatcher import ( - AbstractResource as 
AbstractResource, - AbstractRoute as AbstractRoute, - DynamicResource as DynamicResource, - PlainResource as PlainResource, - PrefixedSubAppResource as PrefixedSubAppResource, - Resource as Resource, - ResourceRoute as ResourceRoute, - StaticResource as StaticResource, - UrlDispatcher as UrlDispatcher, - UrlMappingMatchInfo as UrlMappingMatchInfo, - View as View, -) -from .web_ws import ( - WebSocketReady as WebSocketReady, - WebSocketResponse as WebSocketResponse, - WSMsgType as WSMsgType, -) - -__all__ = ( - # web_app - "AppKey", - "Application", - "CleanupError", - # web_exceptions - "NotAppKeyWarning", - "HTTPAccepted", - "HTTPBadGateway", - "HTTPBadRequest", - "HTTPClientError", - "HTTPConflict", - "HTTPCreated", - "HTTPError", - "HTTPException", - "HTTPExpectationFailed", - "HTTPFailedDependency", - "HTTPForbidden", - "HTTPFound", - "HTTPGatewayTimeout", - "HTTPGone", - "HTTPInsufficientStorage", - "HTTPInternalServerError", - "HTTPLengthRequired", - "HTTPMethodNotAllowed", - "HTTPMisdirectedRequest", - "HTTPMove", - "HTTPMovedPermanently", - "HTTPMultipleChoices", - "HTTPNetworkAuthenticationRequired", - "HTTPNoContent", - "HTTPNonAuthoritativeInformation", - "HTTPNotAcceptable", - "HTTPNotExtended", - "HTTPNotFound", - "HTTPNotImplemented", - "HTTPNotModified", - "HTTPOk", - "HTTPPartialContent", - "HTTPPaymentRequired", - "HTTPPermanentRedirect", - "HTTPPreconditionFailed", - "HTTPPreconditionRequired", - "HTTPProxyAuthenticationRequired", - "HTTPRedirection", - "HTTPRequestEntityTooLarge", - "HTTPRequestHeaderFieldsTooLarge", - "HTTPRequestRangeNotSatisfiable", - "HTTPRequestTimeout", - "HTTPRequestURITooLong", - "HTTPResetContent", - "HTTPSeeOther", - "HTTPServerError", - "HTTPServiceUnavailable", - "HTTPSuccessful", - "HTTPTemporaryRedirect", - "HTTPTooManyRequests", - "HTTPUnauthorized", - "HTTPUnavailableForLegalReasons", - "HTTPUnprocessableEntity", - "HTTPUnsupportedMediaType", - "HTTPUpgradeRequired", - "HTTPUseProxy", - "HTTPVariantAlsoNegotiates", - "HTTPVersionNotSupported", - # web_fileresponse - "FileResponse", - # web_middlewares - "middleware", - "normalize_path_middleware", - # web_protocol - "PayloadAccessError", - "RequestHandler", - "RequestPayloadError", - # web_request - "BaseRequest", - "FileField", - "Request", - # web_response - "ContentCoding", - "Response", - "StreamResponse", - "json_response", - # web_routedef - "AbstractRouteDef", - "RouteDef", - "RouteTableDef", - "StaticDef", - "delete", - "get", - "head", - "options", - "patch", - "post", - "put", - "route", - "static", - "view", - # web_runner - "AppRunner", - "BaseRunner", - "BaseSite", - "GracefulExit", - "ServerRunner", - "SockSite", - "TCPSite", - "UnixSite", - "NamedPipeSite", - # web_server - "Server", - # web_urldispatcher - "AbstractResource", - "AbstractRoute", - "DynamicResource", - "PlainResource", - "PrefixedSubAppResource", - "Resource", - "ResourceRoute", - "StaticResource", - "UrlDispatcher", - "UrlMappingMatchInfo", - "View", - # web_ws - "WebSocketReady", - "WebSocketResponse", - "WSMsgType", - # web - "run_app", -) - - -try: - from ssl import SSLContext -except ImportError: # pragma: no cover - SSLContext = Any # type: ignore[misc,assignment] - -# Only display warning when using -Wdefault, -We, -X dev or similar. 
-warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) - -HostSequence = TypingIterable[str] - - -async def _run_app( - app: Union[Application, Awaitable[Application]], - *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Union[PathLike, TypingIterable[PathLike], None] = None, - sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, - shutdown_timeout: float = 60.0, - keepalive_timeout: float = 75.0, - ssl_context: Optional[SSLContext] = None, - print: Optional[Callable[..., None]] = print, - backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, - handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - handler_cancellation: bool = False, -) -> None: - async def wait( - starting_tasks: "WeakSet[asyncio.Task[object]]", shutdown_timeout: float - ) -> None: - # Wait for pending tasks for a given time limit. - t = asyncio.current_task() - assert t is not None - starting_tasks.add(t) - with suppress(asyncio.TimeoutError): - await asyncio.wait_for(_wait(starting_tasks), timeout=shutdown_timeout) - - async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None: - t = asyncio.current_task() - assert t is not None - exclude.add(t) - while tasks := asyncio.all_tasks().difference(exclude): - await asyncio.wait(tasks) - - # An internal function to actually do all dirty job for application running - if asyncio.iscoroutine(app): - app = await app - - app = cast(Application, app) - - runner = AppRunner( - app, - handle_signals=handle_signals, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - keepalive_timeout=keepalive_timeout, - shutdown_timeout=shutdown_timeout, - handler_cancellation=handler_cancellation, - ) - - await runner.setup() - # On shutdown we want to avoid waiting on tasks which run forever. - # It's very likely that all tasks which run forever will have been created by - # the time we have completed the application startup (in runner.setup()), - # so we just record all running tasks here and exclude them later. 
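# Illustrative sketch, not part of the deleted aiohttp/web.py: the public
# run_app() wrapper defined later in this file drives the _run_app() coroutine
# being deleted here. A minimal, hedged usage example; the handler, host and
# port are placeholders.
from aiohttp import web


async def handle(request: web.Request) -> web.Response:
    return web.Response(text="ok")


def serve() -> None:
    app = web.Application()
    app.router.add_get("/", handle)
    web.run_app(app, host="127.0.0.1", port=8080)


if __name__ == "__main__":
    serve()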
- starting_tasks: "WeakSet[asyncio.Task[object]]" = WeakSet(asyncio.all_tasks()) - runner.shutdown_callback = partial(wait, starting_tasks, shutdown_timeout) - - sites: List[BaseSite] = [] - - try: - if host is not None: - if isinstance(host, (str, bytes, bytearray, memoryview)): - sites.append( - TCPSite( - runner, - host, - port, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - else: - for h in host: - sites.append( - TCPSite( - runner, - h, - port, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - elif path is None and sock is None or port is not None: - sites.append( - TCPSite( - runner, - port=port, - ssl_context=ssl_context, - backlog=backlog, - reuse_address=reuse_address, - reuse_port=reuse_port, - ) - ) - - if path is not None: - if isinstance(path, (str, os.PathLike)): - sites.append( - UnixSite( - runner, - path, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - else: - for p in path: - sites.append( - UnixSite( - runner, - p, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - - if sock is not None: - if not isinstance(sock, Iterable): - sites.append( - SockSite( - runner, - sock, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - else: - for s in sock: - sites.append( - SockSite( - runner, - s, - ssl_context=ssl_context, - backlog=backlog, - ) - ) - for site in sites: - await site.start() - - if print: # pragma: no branch - names = sorted(str(s.name) for s in runner.sites) - print( - "======== Running on {} ========\n" - "(Press CTRL+C to quit)".format(", ".join(names)) - ) - - # sleep forever by 1 hour intervals, - while True: - await asyncio.sleep(3600) - finally: - await runner.cleanup() - - -def _cancel_tasks( - to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop -) -> None: - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - loop.call_exception_handler( - { - "message": "unhandled exception during asyncio.run() shutdown", - "exception": task.exception(), - "task": task, - } - ) - - -def run_app( - app: Union[Application, Awaitable[Application]], - *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Union[PathLike, TypingIterable[PathLike], None] = None, - sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, - shutdown_timeout: float = 60.0, - keepalive_timeout: float = 75.0, - ssl_context: Optional[SSLContext] = None, - print: Optional[Callable[..., None]] = print, - backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, - handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - handler_cancellation: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, -) -> None: - """Run an app locally""" - if loop is None: - loop = asyncio.new_event_loop() - - # Configure if and only if in debugging mode and using the default logger - if loop.get_debug() and access_log and access_log.name == "aiohttp.access": - if access_log.level == logging.NOTSET: - access_log.setLevel(logging.DEBUG) - if not access_log.hasHandlers(): - access_log.addHandler(logging.StreamHandler()) - - 
main_task = loop.create_task( - _run_app( - app, - host=host, - port=port, - path=path, - sock=sock, - shutdown_timeout=shutdown_timeout, - keepalive_timeout=keepalive_timeout, - ssl_context=ssl_context, - print=print, - backlog=backlog, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - handle_signals=handle_signals, - reuse_address=reuse_address, - reuse_port=reuse_port, - handler_cancellation=handler_cancellation, - ) - ) - - try: - asyncio.set_event_loop(loop) - loop.run_until_complete(main_task) - except (GracefulExit, KeyboardInterrupt): # pragma: no cover - pass - finally: - _cancel_tasks({main_task}, loop) - _cancel_tasks(asyncio.all_tasks(loop), loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() - - -def main(argv: List[str]) -> None: - arg_parser = ArgumentParser( - description="aiohttp.web Application server", prog="aiohttp.web" - ) - arg_parser.add_argument( - "entry_func", - help=( - "Callable returning the `aiohttp.web.Application` instance to " - "run. Should be specified in the 'module:function' syntax." - ), - metavar="entry-func", - ) - arg_parser.add_argument( - "-H", - "--hostname", - help="TCP/IP hostname to serve on (default: %(default)r)", - default="localhost", - ) - arg_parser.add_argument( - "-P", - "--port", - help="TCP/IP port to serve on (default: %(default)r)", - type=int, - default="8080", - ) - arg_parser.add_argument( - "-U", - "--path", - help="Unix file system path to serve on. Specifying a path will cause " - "hostname and port arguments to be ignored.", - ) - args, extra_argv = arg_parser.parse_known_args(argv) - - # Import logic - mod_str, _, func_str = args.entry_func.partition(":") - if not func_str or not mod_str: - arg_parser.error("'entry-func' not in 'module:function' syntax") - if mod_str.startswith("."): - arg_parser.error("relative module names not supported") - try: - module = import_module(mod_str) - except ImportError as ex: - arg_parser.error(f"unable to import {mod_str}: {ex}") - try: - func = getattr(module, func_str) - except AttributeError: - arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") - - # Compatibility logic - if args.path is not None and not hasattr(socket, "AF_UNIX"): - arg_parser.error( - "file system paths not supported by your operating" " environment" - ) - - logging.basicConfig(level=logging.DEBUG) - - app = func(extra_argv) - run_app(app, host=args.hostname, port=args.port, path=args.path) - arg_parser.exit(message="Stopped\n") - - -if __name__ == "__main__": # pragma: no branch - main(sys.argv[1:]) # pragma: no cover diff --git a/.venv/Lib/site-packages/aiohttp/web_app.py b/.venv/Lib/site-packages/aiohttp/web_app.py deleted file mode 100644 index 91bf5fd..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_app.py +++ /dev/null @@ -1,596 +0,0 @@ -import asyncio -import logging -import warnings -from functools import partial, update_wrapper -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Awaitable, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, -) - -from aiosignal import Signal -from frozenlist import FrozenList - -from . 
import hdrs -from .abc import ( - AbstractAccessLogger, - AbstractMatchInfo, - AbstractRouter, - AbstractStreamWriter, -) -from .helpers import DEBUG, AppKey -from .http_parser import RawRequestMessage -from .log import web_logger -from .streams import StreamReader -from .typedefs import Middleware -from .web_exceptions import NotAppKeyWarning -from .web_log import AccessLogger -from .web_middlewares import _fix_request_current_app -from .web_protocol import RequestHandler -from .web_request import Request -from .web_response import StreamResponse -from .web_routedef import AbstractRouteDef -from .web_server import Server -from .web_urldispatcher import ( - AbstractResource, - AbstractRoute, - Domain, - MaskDomain, - MatchedSubAppResource, - PrefixedSubAppResource, - UrlDispatcher, -) - -__all__ = ("Application", "CleanupError") - - -if TYPE_CHECKING: - _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] - _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] - _Middlewares = FrozenList[Middleware] - _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]] - _Subapps = List["Application"] -else: - # No type checker mode, skip types - _AppSignal = Signal - _RespPrepareSignal = Signal - _Middlewares = FrozenList - _MiddlewaresHandlers = Optional[Sequence] - _Subapps = List - -_T = TypeVar("_T") -_U = TypeVar("_U") - - -class Application(MutableMapping[Union[str, AppKey[Any]], Any]): - ATTRS = frozenset( - [ - "logger", - "_debug", - "_router", - "_loop", - "_handler_args", - "_middlewares", - "_middlewares_handlers", - "_run_middlewares", - "_state", - "_frozen", - "_pre_frozen", - "_subapps", - "_on_response_prepare", - "_on_startup", - "_on_shutdown", - "_on_cleanup", - "_client_max_size", - "_cleanup_ctx", - ] - ) - - def __init__( - self, - *, - logger: logging.Logger = web_logger, - router: Optional[UrlDispatcher] = None, - middlewares: Iterable[Middleware] = (), - handler_args: Optional[Mapping[str, Any]] = None, - client_max_size: int = 1024**2, - loop: Optional[asyncio.AbstractEventLoop] = None, - debug: Any = ..., # mypy doesn't support ellipsis - ) -> None: - if router is None: - router = UrlDispatcher() - else: - warnings.warn( - "router argument is deprecated", DeprecationWarning, stacklevel=2 - ) - assert isinstance(router, AbstractRouter), router - - if loop is not None: - warnings.warn( - "loop argument is deprecated", DeprecationWarning, stacklevel=2 - ) - - if debug is not ...: - warnings.warn( - "debug argument is deprecated", DeprecationWarning, stacklevel=2 - ) - self._debug = debug - self._router: UrlDispatcher = router - self._loop = loop - self._handler_args = handler_args - self.logger = logger - - self._middlewares: _Middlewares = FrozenList(middlewares) - - # initialized on freezing - self._middlewares_handlers: _MiddlewaresHandlers = None - # initialized on freezing - self._run_middlewares: Optional[bool] = None - - self._state: Dict[Union[AppKey[Any], str], object] = {} - self._frozen = False - self._pre_frozen = False - self._subapps: _Subapps = [] - - self._on_response_prepare: _RespPrepareSignal = Signal(self) - self._on_startup: _AppSignal = Signal(self) - self._on_shutdown: _AppSignal = Signal(self) - self._on_cleanup: _AppSignal = Signal(self) - self._cleanup_ctx = CleanupContext() - self._on_startup.append(self._cleanup_ctx._on_startup) - self._on_cleanup.append(self._cleanup_ctx._on_cleanup) - self._client_max_size = client_max_size - - def __init_subclass__(cls: Type["Application"]) -> None: - 
warnings.warn( - "Inheritance class {} from web.Application " - "is discouraged".format(cls.__name__), - DeprecationWarning, - stacklevel=3, - ) - - if DEBUG: # pragma: no cover - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom web.Application.{} attribute " - "is discouraged".format(name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - # MutableMapping API - - def __eq__(self, other: object) -> bool: - return self is other - - @overload # type: ignore[override] - def __getitem__(self, key: AppKey[_T]) -> _T: - ... - - @overload - def __getitem__(self, key: str) -> Any: - ... - - def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: - return self._state[key] - - def _check_frozen(self) -> None: - if self._frozen: - warnings.warn( - "Changing state of started or joined " "application is deprecated", - DeprecationWarning, - stacklevel=3, - ) - - @overload # type: ignore[override] - def __setitem__(self, key: AppKey[_T], value: _T) -> None: - ... - - @overload - def __setitem__(self, key: str, value: Any) -> None: - ... - - def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: - self._check_frozen() - if not isinstance(key, AppKey): - warnings.warn( - "It is recommended to use web.AppKey instances for keys.\n" - + "https://docs.aiohttp.org/en/stable/web_advanced.html" - + "#application-s-config", - category=NotAppKeyWarning, - stacklevel=2, - ) - self._state[key] = value - - def __delitem__(self, key: Union[str, AppKey[_T]]) -> None: - self._check_frozen() - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: - return iter(self._state) - - @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: - ... - - @overload - def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: - ... - - @overload - def get(self, key: str, default: Any = ...) -> Any: - ... 
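[Usage note, not part of the deleted file: the __getitem__/__setitem__/get overloads above are what give web.AppKey its typed behaviour; storing state under a plain str key still works but emits NotAppKeyWarning. A small sketch of the recommended pattern; the key name, the ClientSession value, and the init_client helper are illustrative assumptions:

    import aiohttp
    from aiohttp import web

    # An AppKey records the value type, so app[client_session] is seen by
    # type checkers as aiohttp.ClientSession rather than Any.
    client_session = web.AppKey("client_session", aiohttp.ClientSession)

    async def init_client(app: web.Application):
        # cleanup_ctx-style setup/teardown: code before the yield runs on
        # startup, code after it runs on cleanup.
        app[client_session] = aiohttp.ClientSession()
        yield
        await app[client_session].close()

    app = web.Application()
    app.cleanup_ctx.append(init_client)
]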
- - def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: - return self._state.get(key, default) - - ######## - @property - def loop(self) -> asyncio.AbstractEventLoop: - # Technically the loop can be None - # but we mask it by explicit type cast - # to provide more convenient type annotation - warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) - return cast(asyncio.AbstractEventLoop, self._loop) - - def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: - if loop is None: - loop = asyncio.get_event_loop() - if self._loop is not None and self._loop is not loop: - raise RuntimeError( - "web.Application instance initialized with different loop" - ) - - self._loop = loop - - # set loop debug - if self._debug is ...: - self._debug = loop.get_debug() - - # set loop to sub applications - for subapp in self._subapps: - subapp._set_loop(loop) - - @property - def pre_frozen(self) -> bool: - return self._pre_frozen - - def pre_freeze(self) -> None: - if self._pre_frozen: - return - - self._pre_frozen = True - self._middlewares.freeze() - self._router.freeze() - self._on_response_prepare.freeze() - self._cleanup_ctx.freeze() - self._on_startup.freeze() - self._on_shutdown.freeze() - self._on_cleanup.freeze() - self._middlewares_handlers = tuple(self._prepare_middleware()) - - # If current app and any subapp do not have middlewares avoid run all - # of the code footprint that it implies, which have a middleware - # hardcoded per app that sets up the current_app attribute. If no - # middlewares are configured the handler will receive the proper - # current_app without needing all of this code. - self._run_middlewares = True if self.middlewares else False - - for subapp in self._subapps: - subapp.pre_freeze() - self._run_middlewares = self._run_middlewares or subapp._run_middlewares - - @property - def frozen(self) -> bool: - return self._frozen - - def freeze(self) -> None: - if self._frozen: - return - - self.pre_freeze() - self._frozen = True - for subapp in self._subapps: - subapp.freeze() - - @property - def debug(self) -> bool: - warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2) - return self._debug # type: ignore[no-any-return] - - def _reg_subapp_signals(self, subapp: "Application") -> None: - def reg_handler(signame: str) -> None: - subsig = getattr(subapp, signame) - - async def handler(app: "Application") -> None: - await subsig.send(subapp) - - appsig = getattr(self, signame) - appsig.append(handler) - - reg_handler("on_startup") - reg_handler("on_shutdown") - reg_handler("on_cleanup") - - def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource: - if not isinstance(prefix, str): - raise TypeError("Prefix must be str") - prefix = prefix.rstrip("/") - if not prefix: - raise ValueError("Prefix cannot be empty") - factory = partial(PrefixedSubAppResource, prefix, subapp) - return self._add_subapp(factory, subapp) - - def _add_subapp( - self, resource_factory: Callable[[], AbstractResource], subapp: "Application" - ) -> AbstractResource: - if self.frozen: - raise RuntimeError("Cannot add sub application to frozen application") - if subapp.frozen: - raise RuntimeError("Cannot add frozen application") - resource = resource_factory() - self.router.register_resource(resource) - self._reg_subapp_signals(subapp) - self._subapps.append(subapp) - subapp.pre_freeze() - if self._loop is not None: - subapp._set_loop(self._loop) - return resource - - def add_domain(self, domain: str, subapp: 
"Application") -> AbstractResource: - if not isinstance(domain, str): - raise TypeError("Domain must be str") - elif "*" in domain: - rule: Domain = MaskDomain(domain) - else: - rule = Domain(domain) - factory = partial(MatchedSubAppResource, rule, subapp) - return self._add_subapp(factory, subapp) - - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: - return self.router.add_routes(routes) - - @property - def on_response_prepare(self) -> _RespPrepareSignal: - return self._on_response_prepare - - @property - def on_startup(self) -> _AppSignal: - return self._on_startup - - @property - def on_shutdown(self) -> _AppSignal: - return self._on_shutdown - - @property - def on_cleanup(self) -> _AppSignal: - return self._on_cleanup - - @property - def cleanup_ctx(self) -> "CleanupContext": - return self._cleanup_ctx - - @property - def router(self) -> UrlDispatcher: - return self._router - - @property - def middlewares(self) -> _Middlewares: - return self._middlewares - - def _make_handler( - self, - *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - **kwargs: Any, - ) -> Server: - - if not issubclass(access_log_class, AbstractAccessLogger): - raise TypeError( - "access_log_class must be subclass of " - "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) - ) - - self._set_loop(loop) - self.freeze() - - kwargs["debug"] = self._debug - kwargs["access_log_class"] = access_log_class - if self._handler_args: - for k, v in self._handler_args.items(): - kwargs[k] = v - - return Server( - self._handle, # type: ignore[arg-type] - request_factory=self._make_request, - loop=self._loop, - **kwargs, - ) - - def make_handler( - self, - *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - **kwargs: Any, - ) -> Server: - - warnings.warn( - "Application.make_handler(...) is deprecated, " "use AppRunner API instead", - DeprecationWarning, - stacklevel=2, - ) - - return self._make_handler( - loop=loop, access_log_class=access_log_class, **kwargs - ) - - async def startup(self) -> None: - """Causes on_startup signal - - Should be called in the event loop along with the request handler. - """ - await self.on_startup.send(self) - - async def shutdown(self) -> None: - """Causes on_shutdown signal - - Should be called before cleanup() - """ - await self.on_shutdown.send(self) - - async def cleanup(self) -> None: - """Causes on_cleanup signal - - Should be called after shutdown() - """ - if self.on_cleanup.frozen: - await self.on_cleanup.send(self) - else: - # If an exception occurs in startup, ensure cleanup contexts are completed. 
- await self._cleanup_ctx._on_cleanup(self) - - def _make_request( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: RequestHandler, - writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - _cls: Type[Request] = Request, - ) -> Request: - return _cls( - message, - payload, - protocol, - writer, - task, - self._loop, - client_max_size=self._client_max_size, - ) - - def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: - for m in reversed(self._middlewares): - if getattr(m, "__middleware_version__", None) == 1: - yield m, True - else: - warnings.warn( - 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), - DeprecationWarning, - stacklevel=2, - ) - yield m, False - - yield _fix_request_current_app(self), True - - async def _handle(self, request: Request) -> StreamResponse: - loop = asyncio.get_event_loop() - debug = loop.get_debug() - match_info = await self._router.resolve(request) - if debug: # pragma: no cover - if not isinstance(match_info, AbstractMatchInfo): - raise TypeError( - "match_info should be AbstractMatchInfo " - "instance, not {!r}".format(match_info) - ) - match_info.add_app(self) - - match_info.freeze() - - resp = None - request._match_info = match_info - expect = request.headers.get(hdrs.EXPECT) - if expect: - resp = await match_info.expect_handler(request) - await request.writer.drain() - - if resp is None: - handler = match_info.handler - - if self._run_middlewares: - for app in match_info.apps[::-1]: - for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] - if new_style: - handler = update_wrapper( - partial(m, handler=handler), handler - ) - else: - handler = await m(app, handler) # type: ignore[arg-type,assignment] - - resp = await handler(request) - - return resp - - def __call__(self) -> "Application": - """gunicorn compatibility""" - return self - - def __repr__(self) -> str: - return f"" - - def __bool__(self) -> bool: - return True - - -class CleanupError(RuntimeError): - @property - def exceptions(self) -> List[BaseException]: - return cast(List[BaseException], self.args[1]) - - -if TYPE_CHECKING: - _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] -else: - _CleanupContextBase = FrozenList - - -class CleanupContext(_CleanupContextBase): - def __init__(self) -> None: - super().__init__() - self._exits: List[AsyncIterator[None]] = [] - - async def _on_startup(self, app: Application) -> None: - for cb in self: - it = cb(app).__aiter__() - await it.__anext__() - self._exits.append(it) - - async def _on_cleanup(self, app: Application) -> None: - errors = [] - for it in reversed(self._exits): - try: - await it.__anext__() - except StopAsyncIteration: - pass - except Exception as exc: - errors.append(exc) - else: - errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) - if errors: - if len(errors) == 1: - raise errors[0] - else: - raise CleanupError("Multiple errors on cleanup stage", errors) diff --git a/.venv/Lib/site-packages/aiohttp/web_exceptions.py b/.venv/Lib/site-packages/aiohttp/web_exceptions.py deleted file mode 100644 index ee2c1e7..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_exceptions.py +++ /dev/null @@ -1,452 +0,0 @@ -import warnings -from typing import Any, Dict, Iterable, List, Optional, Set # noqa - -from yarl import URL - -from .typedefs import LooseHeaders, StrOrURL -from .web_response import Response - -__all__ = ( - "HTTPException", - "HTTPError", - "HTTPRedirection", - "HTTPSuccessful", - "HTTPOk", - "HTTPCreated", - 
"HTTPAccepted", - "HTTPNonAuthoritativeInformation", - "HTTPNoContent", - "HTTPResetContent", - "HTTPPartialContent", - "HTTPMove", - "HTTPMultipleChoices", - "HTTPMovedPermanently", - "HTTPFound", - "HTTPSeeOther", - "HTTPNotModified", - "HTTPUseProxy", - "HTTPTemporaryRedirect", - "HTTPPermanentRedirect", - "HTTPClientError", - "HTTPBadRequest", - "HTTPUnauthorized", - "HTTPPaymentRequired", - "HTTPForbidden", - "HTTPNotFound", - "HTTPMethodNotAllowed", - "HTTPNotAcceptable", - "HTTPProxyAuthenticationRequired", - "HTTPRequestTimeout", - "HTTPConflict", - "HTTPGone", - "HTTPLengthRequired", - "HTTPPreconditionFailed", - "HTTPRequestEntityTooLarge", - "HTTPRequestURITooLong", - "HTTPUnsupportedMediaType", - "HTTPRequestRangeNotSatisfiable", - "HTTPExpectationFailed", - "HTTPMisdirectedRequest", - "HTTPUnprocessableEntity", - "HTTPFailedDependency", - "HTTPUpgradeRequired", - "HTTPPreconditionRequired", - "HTTPTooManyRequests", - "HTTPRequestHeaderFieldsTooLarge", - "HTTPUnavailableForLegalReasons", - "HTTPServerError", - "HTTPInternalServerError", - "HTTPNotImplemented", - "HTTPBadGateway", - "HTTPServiceUnavailable", - "HTTPGatewayTimeout", - "HTTPVersionNotSupported", - "HTTPVariantAlsoNegotiates", - "HTTPInsufficientStorage", - "HTTPNotExtended", - "HTTPNetworkAuthenticationRequired", -) - - -class NotAppKeyWarning(UserWarning): - """Warning when not using AppKey in Application.""" - - -############################################################ -# HTTP Exceptions -############################################################ - - -class HTTPException(Response, Exception): - - # You should set in subclasses: - # status = 200 - - status_code = -1 - empty_body = False - - __http_exception__ = True - - def __init__( - self, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - if body is not None: - warnings.warn( - "body argument is deprecated for http web exceptions", - DeprecationWarning, - ) - Response.__init__( - self, - status=self.status_code, - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - Exception.__init__(self, self.reason) - if self.body is None and not self.empty_body: - self.text = f"{self.status}: {self.reason}" - - def __bool__(self) -> bool: - return True - - -class HTTPError(HTTPException): - """Base class for exceptions with status codes in the 400s and 500s.""" - - -class HTTPRedirection(HTTPException): - """Base class for exceptions with status codes in the 300s.""" - - -class HTTPSuccessful(HTTPException): - """Base class for exceptions with status codes in the 200s.""" - - -class HTTPOk(HTTPSuccessful): - status_code = 200 - - -class HTTPCreated(HTTPSuccessful): - status_code = 201 - - -class HTTPAccepted(HTTPSuccessful): - status_code = 202 - - -class HTTPNonAuthoritativeInformation(HTTPSuccessful): - status_code = 203 - - -class HTTPNoContent(HTTPSuccessful): - status_code = 204 - empty_body = True - - -class HTTPResetContent(HTTPSuccessful): - status_code = 205 - empty_body = True - - -class HTTPPartialContent(HTTPSuccessful): - status_code = 206 - - -############################################################ -# 3xx redirection -############################################################ - - -class HTTPMove(HTTPRedirection): - def __init__( - self, - location: StrOrURL, - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: 
Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - if not location: - raise ValueError("HTTP redirects need a location to redirect to.") - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Location"] = str(URL(location)) - self.location = location - - -class HTTPMultipleChoices(HTTPMove): - status_code = 300 - - -class HTTPMovedPermanently(HTTPMove): - status_code = 301 - - -class HTTPFound(HTTPMove): - status_code = 302 - - -# This one is safe after a POST (the redirected location will be -# retrieved with GET): -class HTTPSeeOther(HTTPMove): - status_code = 303 - - -class HTTPNotModified(HTTPRedirection): - # FIXME: this should include a date or etag header - status_code = 304 - empty_body = True - - -class HTTPUseProxy(HTTPMove): - # Not a move, but looks a little like one - status_code = 305 - - -class HTTPTemporaryRedirect(HTTPMove): - status_code = 307 - - -class HTTPPermanentRedirect(HTTPMove): - status_code = 308 - - -############################################################ -# 4xx client error -############################################################ - - -class HTTPClientError(HTTPError): - pass - - -class HTTPBadRequest(HTTPClientError): - status_code = 400 - - -class HTTPUnauthorized(HTTPClientError): - status_code = 401 - - -class HTTPPaymentRequired(HTTPClientError): - status_code = 402 - - -class HTTPForbidden(HTTPClientError): - status_code = 403 - - -class HTTPNotFound(HTTPClientError): - status_code = 404 - - -class HTTPMethodNotAllowed(HTTPClientError): - status_code = 405 - - def __init__( - self, - method: str, - allowed_methods: Iterable[str], - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - allow = ",".join(sorted(allowed_methods)) - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self.headers["Allow"] = allow - self.allowed_methods: Set[str] = set(allowed_methods) - self.method = method.upper() - - -class HTTPNotAcceptable(HTTPClientError): - status_code = 406 - - -class HTTPProxyAuthenticationRequired(HTTPClientError): - status_code = 407 - - -class HTTPRequestTimeout(HTTPClientError): - status_code = 408 - - -class HTTPConflict(HTTPClientError): - status_code = 409 - - -class HTTPGone(HTTPClientError): - status_code = 410 - - -class HTTPLengthRequired(HTTPClientError): - status_code = 411 - - -class HTTPPreconditionFailed(HTTPClientError): - status_code = 412 - - -class HTTPRequestEntityTooLarge(HTTPClientError): - status_code = 413 - - def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: - kwargs.setdefault( - "text", - "Maximum request body size {} exceeded, " - "actual body size {}".format(max_size, actual_size), - ) - super().__init__(**kwargs) - - -class HTTPRequestURITooLong(HTTPClientError): - status_code = 414 - - -class HTTPUnsupportedMediaType(HTTPClientError): - status_code = 415 - - -class HTTPRequestRangeNotSatisfiable(HTTPClientError): - status_code = 416 - - -class HTTPExpectationFailed(HTTPClientError): - status_code = 417 - - -class HTTPMisdirectedRequest(HTTPClientError): - status_code = 421 - - -class HTTPUnprocessableEntity(HTTPClientError): - status_code = 422 - - -class HTTPFailedDependency(HTTPClientError): - status_code = 424 - - -class HTTPUpgradeRequired(HTTPClientError): - status_code = 426 - - -class 
HTTPPreconditionRequired(HTTPClientError): - status_code = 428 - - -class HTTPTooManyRequests(HTTPClientError): - status_code = 429 - - -class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): - status_code = 431 - - -class HTTPUnavailableForLegalReasons(HTTPClientError): - status_code = 451 - - def __init__( - self, - link: Optional[StrOrURL], - *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, - body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, - ) -> None: - super().__init__( - headers=headers, - reason=reason, - body=body, - text=text, - content_type=content_type, - ) - self._link = None - if link: - self._link = URL(link) - self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"' - - @property - def link(self) -> Optional[URL]: - return self._link - - -############################################################ -# 5xx Server Error -############################################################ -# Response status codes beginning with the digit "5" indicate cases in -# which the server is aware that it has erred or is incapable of -# performing the request. Except when responding to a HEAD request, the -# server SHOULD include an entity containing an explanation of the error -# situation, and whether it is a temporary or permanent condition. User -# agents SHOULD display any included entity to the user. These response -# codes are applicable to any request method. - - -class HTTPServerError(HTTPError): - pass - - -class HTTPInternalServerError(HTTPServerError): - status_code = 500 - - -class HTTPNotImplemented(HTTPServerError): - status_code = 501 - - -class HTTPBadGateway(HTTPServerError): - status_code = 502 - - -class HTTPServiceUnavailable(HTTPServerError): - status_code = 503 - - -class HTTPGatewayTimeout(HTTPServerError): - status_code = 504 - - -class HTTPVersionNotSupported(HTTPServerError): - status_code = 505 - - -class HTTPVariantAlsoNegotiates(HTTPServerError): - status_code = 506 - - -class HTTPInsufficientStorage(HTTPServerError): - status_code = 507 - - -class HTTPNotExtended(HTTPServerError): - status_code = 510 - - -class HTTPNetworkAuthenticationRequired(HTTPServerError): - status_code = 511 diff --git a/.venv/Lib/site-packages/aiohttp/web_fileresponse.py b/.venv/Lib/site-packages/aiohttp/web_fileresponse.py deleted file mode 100644 index 6496ffa..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_fileresponse.py +++ /dev/null @@ -1,301 +0,0 @@ -import asyncio -import mimetypes -import os -import pathlib -from typing import ( # noqa - IO, - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Final, - Iterator, - List, - Optional, - Tuple, - Union, - cast, -) - -from . 
import hdrs -from .abc import AbstractStreamWriter -from .helpers import ETAG_ANY, ETag, must_be_empty_body -from .typedefs import LooseHeaders, PathLike -from .web_exceptions import ( - HTTPNotModified, - HTTPPartialContent, - HTTPPreconditionFailed, - HTTPRequestRangeNotSatisfiable, -) -from .web_response import StreamResponse - -__all__ = ("FileResponse",) - -if TYPE_CHECKING: - from .web_request import BaseRequest - - -_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] - - -NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) - - -class FileResponse(StreamResponse): - """A response object can be used to send files.""" - - def __init__( - self, - path: PathLike, - chunk_size: int = 256 * 1024, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - ) -> None: - super().__init__(status=status, reason=reason, headers=headers) - - self._path = pathlib.Path(path) - self._chunk_size = chunk_size - - async def _sendfile_fallback( - self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int - ) -> AbstractStreamWriter: - # To keep memory usage low,fobj is transferred in chunks - # controlled by the constructor's chunk_size argument. - - chunk_size = self._chunk_size - loop = asyncio.get_event_loop() - - await loop.run_in_executor(None, fobj.seek, offset) - - chunk = await loop.run_in_executor(None, fobj.read, chunk_size) - while chunk: - await writer.write(chunk) - count = count - chunk_size - if count <= 0: - break - chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) - - await writer.drain() - return writer - - async def _sendfile( - self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int - ) -> AbstractStreamWriter: - writer = await super().prepare(request) - assert writer is not None - - if NOSENDFILE or self.compression: - return await self._sendfile_fallback(writer, fobj, offset, count) - - loop = request._loop - transport = request.transport - assert transport is not None - - try: - await loop.sendfile(transport, fobj, offset, count) - except NotImplementedError: - return await self._sendfile_fallback(writer, fobj, offset, count) - - await super().write_eof() - return writer - - @staticmethod - def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool: - if len(etags) == 1 and etags[0].value == ETAG_ANY: - return True - return any(etag.value == etag_value for etag in etags if not etag.is_weak) - - async def _not_modified( - self, request: "BaseRequest", etag_value: str, last_modified: float - ) -> Optional[AbstractStreamWriter]: - self.set_status(HTTPNotModified.status_code) - self._length_check = False - self.etag = etag_value # type: ignore[assignment] - self.last_modified = last_modified # type: ignore[assignment] - # Delete any Content-Length headers provided by user. HTTP 304 - # should always have empty response body - return await super().prepare(request) - - async def _precondition_failed( - self, request: "BaseRequest" - ) -> Optional[AbstractStreamWriter]: - self.set_status(HTTPPreconditionFailed.status_code) - self.content_length = 0 - return await super().prepare(request) - - def _get_file_path_stat_and_gzip( - self, check_for_gzipped_file: bool - ) -> Tuple[pathlib.Path, os.stat_result, bool]: - """Return the file path, stat result, and gzip status. - - This method should be called from a thread executor - since it calls os.stat which may block. 
- """ - filepath = self._path - if check_for_gzipped_file: - gzip_path = filepath.with_name(filepath.name + ".gz") - try: - return gzip_path, gzip_path.stat(), True - except OSError: - # Fall through and try the non-gzipped file - pass - - return filepath, filepath.stat(), False - - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - loop = asyncio.get_event_loop() - check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") - filepath, st, gzip = await loop.run_in_executor( - None, self._get_file_path_stat_and_gzip, check_for_gzipped_file - ) - - etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" - last_modified = st.st_mtime - - # https://tools.ietf.org/html/rfc7232#section-6 - ifmatch = request.if_match - if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch): - return await self._precondition_failed(request) - - unmodsince = request.if_unmodified_since - if ( - unmodsince is not None - and ifmatch is None - and st.st_mtime > unmodsince.timestamp() - ): - return await self._precondition_failed(request) - - ifnonematch = request.if_none_match - if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch): - return await self._not_modified(request, etag_value, last_modified) - - modsince = request.if_modified_since - if ( - modsince is not None - and ifnonematch is None - and st.st_mtime <= modsince.timestamp() - ): - return await self._not_modified(request, etag_value, last_modified) - - if hdrs.CONTENT_TYPE not in self.headers: - ct, encoding = mimetypes.guess_type(str(filepath)) - if not ct: - ct = "application/octet-stream" - should_set_ct = True - else: - encoding = "gzip" if gzip else None - should_set_ct = False - - status = self._status - file_size = st.st_size - count = file_size - - start = None - - ifrange = request.if_range - if ifrange is None or st.st_mtime <= ifrange.timestamp(): - # If-Range header check: - # condition = cached date >= last modification date - # return 206 if True else 200. - # if False: - # Range header would not be processed, return 200 - # if True but Range header missing - # return 200 - try: - rng = request.http_range - start = rng.start - end = rng.stop - except ValueError: - # https://tools.ietf.org/html/rfc7233: - # A server generating a 416 (Range Not Satisfiable) response to - # a byte-range request SHOULD send a Content-Range header field - # with an unsatisfied-range value. - # The complete-length in a 416 response indicates the current - # length of the selected representation. - # - # Will do the same below. 
Many servers ignore this and do not - # send a Content-Range header with HTTP 416 - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" - self.set_status(HTTPRequestRangeNotSatisfiable.status_code) - return await super().prepare(request) - - # If a range request has been made, convert start, end slice - # notation into file pointer offset and count - if start is not None or end is not None: - if start < 0 and end is None: # return tail of file - start += file_size - if start < 0: - # if Range:bytes=-1000 in request header but file size - # is only 200, there would be trouble without this - start = 0 - count = file_size - start - else: - # rfc7233:If the last-byte-pos value is - # absent, or if the value is greater than or equal to - # the current length of the representation data, - # the byte range is interpreted as the remainder - # of the representation (i.e., the server replaces the - # value of last-byte-pos with a value that is one less than - # the current length of the selected representation). - count = ( - min(end if end is not None else file_size, file_size) - start - ) - - if start >= file_size: - # HTTP 416 should be returned in this case. - # - # According to https://tools.ietf.org/html/rfc7233: - # If a valid byte-range-set includes at least one - # byte-range-spec with a first-byte-pos that is less than - # the current length of the representation, or at least one - # suffix-byte-range-spec with a non-zero suffix-length, - # then the byte-range-set is satisfiable. Otherwise, the - # byte-range-set is unsatisfiable. - self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" - self.set_status(HTTPRequestRangeNotSatisfiable.status_code) - return await super().prepare(request) - - status = HTTPPartialContent.status_code - # Even though you are sending the whole file, you should still - # return a HTTP 206 for a Range request. - self.set_status(status) - - if should_set_ct: - self.content_type = ct # type: ignore[assignment] - if encoding: - self.headers[hdrs.CONTENT_ENCODING] = encoding - if gzip: - self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING - # Disable compression if we are already sending - # a compressed file since we don't want to double - # compress. - self._compression = False - - self.etag = etag_value # type: ignore[assignment] - self.last_modified = st.st_mtime # type: ignore[assignment] - self.content_length = count - - self.headers[hdrs.ACCEPT_RANGES] = "bytes" - - real_start = cast(int, start) - - if status == HTTPPartialContent.status_code: - self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( - real_start, real_start + count - 1, file_size - ) - - # If we are sending 0 bytes calling sendfile() will throw a ValueError - if count == 0 or must_be_empty_body(request.method, self.status): - return await super().prepare(request) - - fobj = await loop.run_in_executor(None, filepath.open, "rb") - if start: # be aware that start could be None or int=0 here. 
- offset = start - else: - offset = 0 - - try: - return await self._sendfile(request, fobj, offset, count) - finally: - await asyncio.shield(loop.run_in_executor(None, fobj.close)) diff --git a/.venv/Lib/site-packages/aiohttp/web_log.py b/.venv/Lib/site-packages/aiohttp/web_log.py deleted file mode 100644 index 633e9e3..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_log.py +++ /dev/null @@ -1,213 +0,0 @@ -import datetime -import functools -import logging -import os -import re -import time as time_mod -from collections import namedtuple -from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa - -from .abc import AbstractAccessLogger -from .web_request import BaseRequest -from .web_response import StreamResponse - -KeyMethod = namedtuple("KeyMethod", "key method") - - -class AccessLogger(AbstractAccessLogger): - """Helper object to log access. - - Usage: - log = logging.getLogger("spam") - log_format = "%a %{User-Agent}i" - access_logger = AccessLogger(log, log_format) - access_logger.log(request, response, time) - - Format: - %% The percent sign - %a Remote IP-address (IP-address of proxy if using reverse proxy) - %t Time when the request was started to process - %P The process ID of the child that serviced the request - %r First line of request - %s Response status code - %b Size of response in bytes, including HTTP headers - %T Time taken to serve the request, in seconds - %Tf Time taken to serve the request, in seconds with floating fraction - in .06f format - %D Time taken to serve the request, in microseconds - %{FOO}i request.headers['FOO'] - %{FOO}o response.headers['FOO'] - %{FOO}e os.environ['FOO'] - - """ - - LOG_FORMAT_MAP = { - "a": "remote_address", - "t": "request_start_time", - "P": "process_id", - "r": "first_request_line", - "s": "response_status", - "b": "response_size", - "T": "request_time", - "Tf": "request_time_frac", - "D": "request_time_micro", - "i": "request_header", - "o": "response_header", - } - - LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' - FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") - CLEANUP_RE = re.compile(r"(%[^s])") - _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} - - def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: - """Initialise the logger. - - logger is a logger object to be used for logging. - log_format is a string with apache compatible log format description. - - """ - super().__init__(logger, log_format=log_format) - - _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) - if not _compiled_format: - _compiled_format = self.compile_format(log_format) - AccessLogger._FORMAT_CACHE[log_format] = _compiled_format - - self._log_format, self._methods = _compiled_format - - def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: - """Translate log_format into form usable by modulo formatting - - All known atoms will be replaced with %s - Also methods for formatting of those atoms will be added to - _methods in appropriate order - - For example we have log_format = "%a %t" - This format will be translated to "%s %s" - Also contents of _methods will be - [self._format_a, self._format_t] - These method will be called and results will be passed - to translated string format. 
- - Each _format_* method receive 'args' which is list of arguments - given to self.log - - Exceptions are _format_e, _format_i and _format_o methods which - also receive key name (by functools.partial) - - """ - # list of (key, method) tuples, we don't use an OrderedDict as users - # can repeat the same key more than once - methods = list() - - for atom in self.FORMAT_RE.findall(log_format): - if atom[1] == "": - format_key1 = self.LOG_FORMAT_MAP[atom[0]] - m = getattr(AccessLogger, "_format_%s" % atom[0]) - key_method = KeyMethod(format_key1, m) - else: - format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) - m = getattr(AccessLogger, "_format_%s" % atom[2]) - key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) - - methods.append(key_method) - - log_format = self.FORMAT_RE.sub(r"%s", log_format) - log_format = self.CLEANUP_RE.sub(r"%\1", log_format) - return log_format, methods - - @staticmethod - def _format_i( - key: str, request: BaseRequest, response: StreamResponse, time: float - ) -> str: - if request is None: - return "(no headers)" - - # suboptimal, make istr(key) once - return request.headers.get(key, "-") - - @staticmethod - def _format_o( - key: str, request: BaseRequest, response: StreamResponse, time: float - ) -> str: - # suboptimal, make istr(key) once - return response.headers.get(key, "-") - - @staticmethod - def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str: - if request is None: - return "-" - ip = request.remote - return ip if ip is not None else "-" - - @staticmethod - def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str: - tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone)) - now = datetime.datetime.now(tz) - start_time = now - datetime.timedelta(seconds=time) - return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]") - - @staticmethod - def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str: - return "<%s>" % os.getpid() - - @staticmethod - def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: - if request is None: - return "-" - return "{} {} HTTP/{}.{}".format( - request.method, - request.path_qs, - request.version.major, - request.version.minor, - ) - - @staticmethod - def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: - return response.status - - @staticmethod - def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int: - return response.body_length - - @staticmethod - def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str: - return str(round(time)) - - @staticmethod - def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str: - return "%06f" % time - - @staticmethod - def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str: - return str(round(time * 1000000)) - - def _format_line( - self, request: BaseRequest, response: StreamResponse, time: float - ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: - return [(key, method(request, response, time)) for key, method in self._methods] - - def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: - if not self.logger.isEnabledFor(logging.INFO): - # Avoid formatting the log line if it will not be emitted. 
- return - try: - fmt_info = self._format_line(request, response, time) - - values = list() - extra = dict() - for key, value in fmt_info: - values.append(value) - - if key.__class__ is str: - extra[key] = value - else: - k1, k2 = key # type: ignore[misc] - dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type] - dct[k2] = value # type: ignore[index,has-type] - extra[k1] = dct # type: ignore[has-type,assignment] - - self.logger.info(self._log_format % tuple(values), extra=extra) - except Exception: - self.logger.exception("Error in logging") diff --git a/.venv/Lib/site-packages/aiohttp/web_middlewares.py b/.venv/Lib/site-packages/aiohttp/web_middlewares.py deleted file mode 100644 index 5da1533..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_middlewares.py +++ /dev/null @@ -1,116 +0,0 @@ -import re -from typing import TYPE_CHECKING, Tuple, Type, TypeVar - -from .typedefs import Handler, Middleware -from .web_exceptions import HTTPMove, HTTPPermanentRedirect -from .web_request import Request -from .web_response import StreamResponse -from .web_urldispatcher import SystemRoute - -__all__ = ( - "middleware", - "normalize_path_middleware", -) - -if TYPE_CHECKING: - from .web_app import Application - -_Func = TypeVar("_Func") - - -async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: - alt_request = request.clone(rel_url=path) - - match_info = await request.app.router.resolve(alt_request) - alt_request._match_info = match_info - - if match_info.http_exception is None: - return True, alt_request - - return False, request - - -def middleware(f: _Func) -> _Func: - f.__middleware_version__ = 1 # type: ignore[attr-defined] - return f - - -def normalize_path_middleware( - *, - append_slash: bool = True, - remove_slash: bool = False, - merge_slashes: bool = True, - redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, -) -> Middleware: - """Factory for producing a middleware that normalizes the path of a request. - - Normalizing means: - - Add or remove a trailing slash to the path. - - Double slashes are replaced by one. - - The middleware returns as soon as it finds a path that resolves - correctly. The order if both merge and append/remove are enabled is - 1) merge slashes - 2) append/remove slash - 3) both merge slashes and append/remove slash. - If the path resolves with at least one of those conditions, it will - redirect to the new path. - - Only one of `append_slash` and `remove_slash` can be enabled. If both - are `True` the factory will raise an assertion error - - If `append_slash` is `True` the middleware will append a slash when - needed. If a resource is defined with trailing slash and the request - comes without it, it will append it automatically. - - If `remove_slash` is `True`, `append_slash` must be `False`. When enabled - the middleware will remove trailing slashes and redirect if the resource - is defined - - If merge_slashes is True, merge multiple consecutive slashes in the - path into one. - """ - correct_configuration = not (append_slash and remove_slash) - assert correct_configuration, "Cannot both remove and append slash" - - @middleware - async def impl(request: Request, handler: Handler) -> StreamResponse: - if isinstance(request.match_info.route, SystemRoute): - paths_to_check = [] - if "?" in request.raw_path: - path, query = request.raw_path.split("?", 1) - query = "?" 
+ query - else: - query = "" - path = request.raw_path - - if merge_slashes: - paths_to_check.append(re.sub("//+", "/", path)) - if append_slash and not request.path.endswith("/"): - paths_to_check.append(path + "/") - if remove_slash and request.path.endswith("/"): - paths_to_check.append(path[:-1]) - if merge_slashes and append_slash: - paths_to_check.append(re.sub("//+", "/", path + "/")) - if merge_slashes and remove_slash: - merged_slashes = re.sub("//+", "/", path) - paths_to_check.append(merged_slashes[:-1]) - - for path in paths_to_check: - path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg - resolves, request = await _check_request_resolves(request, path) - if resolves: - raise redirect_class(request.raw_path + query) - - return await handler(request) - - return impl - - -def _fix_request_current_app(app: "Application") -> Middleware: - @middleware - async def impl(request: Request, handler: Handler) -> StreamResponse: - with request.match_info.set_current_app(app): - return await handler(request) - - return impl diff --git a/.venv/Lib/site-packages/aiohttp/web_protocol.py b/.venv/Lib/site-packages/aiohttp/web_protocol.py deleted file mode 100644 index ec5856a..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_protocol.py +++ /dev/null @@ -1,698 +0,0 @@ -import asyncio -import asyncio.streams -import traceback -import warnings -from collections import deque -from contextlib import suppress -from html import escape as html_escape -from http import HTTPStatus -from logging import Logger -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Deque, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -import attr -import yarl - -from .abc import AbstractAccessLogger, AbstractStreamWriter -from .base_protocol import BaseProtocol -from .helpers import ceil_timeout -from .http import ( - HttpProcessingError, - HttpRequestParser, - HttpVersion10, - RawRequestMessage, - StreamWriter, -) -from .log import access_logger, server_logger -from .streams import EMPTY_PAYLOAD, StreamReader -from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException -from .web_log import AccessLogger -from .web_request import BaseRequest -from .web_response import Response, StreamResponse - -__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") - -if TYPE_CHECKING: - from .web_server import Server - - -_RequestFactory = Callable[ - [ - RawRequestMessage, - StreamReader, - "RequestHandler", - AbstractStreamWriter, - "asyncio.Task[None]", - ], - BaseRequest, -] - -_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] - -ERROR = RawRequestMessage( - "UNKNOWN", - "/", - HttpVersion10, - {}, # type: ignore[arg-type] - {}, # type: ignore[arg-type] - True, - None, - False, - False, - yarl.URL("/"), -) - - -class RequestPayloadError(Exception): - """Payload parsing error.""" - - -class PayloadAccessError(Exception): - """Payload was accessed after response was sent.""" - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class _ErrInfo: - status: int - exc: BaseException - message: str - - -_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] - - -class RequestHandler(BaseProtocol): - """HTTP protocol implementation. - - RequestHandler handles incoming HTTP request. It reads request line, - request headers and request payload and calls handle_request() method. - By default it always returns with 404 response. 
- - RequestHandler handles errors in incoming request, like bad - status line, bad headers or incomplete payload. If any error occurs, - connection gets closed. - - keepalive_timeout -- number of seconds before closing - keep-alive connection - - tcp_keepalive -- TCP keep-alive is on, default is on - - debug -- enable debug mode - - logger -- custom logger object - - access_log_class -- custom class for access_logger - - access_log -- custom logging object - - access_log_format -- access log format string - - loop -- Optional event loop - - max_line_size -- Optional maximum header line size - - max_field_size -- Optional maximum header field size - - max_headers -- Optional maximum header size - - timeout_ceil_threshold -- Optional value to specify - threshold to ceil() timeout - values - - """ - - KEEPALIVE_RESCHEDULE_DELAY = 1 - - __slots__ = ( - "_request_count", - "_keepalive", - "_manager", - "_request_handler", - "_request_factory", - "_tcp_keepalive", - "_keepalive_time", - "_keepalive_handle", - "_keepalive_timeout", - "_lingering_time", - "_messages", - "_message_tail", - "_waiter", - "_task_handler", - "_upgrade", - "_payload_parser", - "_request_parser", - "_reading_paused", - "logger", - "debug", - "access_log", - "access_logger", - "_close", - "_force_close", - "_current_request", - "_timeout_ceil_threshold", - ) - - def __init__( - self, - manager: "Server", - *, - loop: asyncio.AbstractEventLoop, - keepalive_timeout: float = 75.0, # NGINX default is 75 secs - tcp_keepalive: bool = True, - logger: Logger = server_logger, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log: Logger = access_logger, - access_log_format: str = AccessLogger.LOG_FORMAT, - debug: bool = False, - max_line_size: int = 8190, - max_headers: int = 32768, - max_field_size: int = 8190, - lingering_time: float = 10.0, - read_bufsize: int = 2**16, - auto_decompress: bool = True, - timeout_ceil_threshold: float = 5, - ): - super().__init__(loop) - - self._request_count = 0 - self._keepalive = False - self._current_request: Optional[BaseRequest] = None - self._manager: Optional[Server] = manager - self._request_handler: Optional[_RequestHandler] = manager.request_handler - self._request_factory: Optional[_RequestFactory] = manager.request_factory - - self._tcp_keepalive = tcp_keepalive - # placeholder to be replaced on keepalive timeout setup - self._keepalive_time = 0.0 - self._keepalive_handle: Optional[asyncio.Handle] = None - self._keepalive_timeout = keepalive_timeout - self._lingering_time = float(lingering_time) - - self._messages: Deque[_MsgType] = deque() - self._message_tail = b"" - - self._waiter: Optional[asyncio.Future[None]] = None - self._task_handler: Optional[asyncio.Task[None]] = None - - self._upgrade = False - self._payload_parser: Any = None - self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( - self, - loop, - read_bufsize, - max_line_size=max_line_size, - max_field_size=max_field_size, - max_headers=max_headers, - payload_exception=RequestPayloadError, - auto_decompress=auto_decompress, - ) - - self._timeout_ceil_threshold: float = 5 - try: - self._timeout_ceil_threshold = float(timeout_ceil_threshold) - except (TypeError, ValueError): - pass - - self.logger = logger - self.debug = debug - self.access_log = access_log - if access_log: - self.access_logger: Optional[AbstractAccessLogger] = access_log_class( - access_log, access_log_format - ) - else: - self.access_logger = None - - self._close = False - self._force_close = False - - def 
__repr__(self) -> str: - return "<{} {}>".format( - self.__class__.__name__, - "connected" if self.transport is not None else "disconnected", - ) - - @property - def keepalive_timeout(self) -> float: - return self._keepalive_timeout - - async def shutdown(self, timeout: Optional[float] = 15.0) -> None: - """Do worker process exit preparations. - - We need to clean up everything and stop accepting requests. - It is especially important for keep-alive connections. - """ - self._force_close = True - - if self._keepalive_handle is not None: - self._keepalive_handle.cancel() - - if self._waiter: - self._waiter.cancel() - - # wait for handlers - with suppress(asyncio.CancelledError, asyncio.TimeoutError): - async with ceil_timeout(timeout): - if self._current_request is not None: - self._current_request._cancel(asyncio.CancelledError()) - - if self._task_handler is not None and not self._task_handler.done(): - await self._task_handler - - # force-close non-idle handler - if self._task_handler is not None: - self._task_handler.cancel() - - if self.transport is not None: - self.transport.close() - self.transport = None - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - super().connection_made(transport) - - real_transport = cast(asyncio.Transport, transport) - if self._tcp_keepalive: - tcp_keepalive(real_transport) - - self._task_handler = self._loop.create_task(self.start()) - assert self._manager is not None - self._manager.connection_made(self, real_transport) - - def connection_lost(self, exc: Optional[BaseException]) -> None: - if self._manager is None: - return - self._manager.connection_lost(self, exc) - - super().connection_lost(exc) - - # Grab value before setting _manager to None. - handler_cancellation = self._manager.handler_cancellation - - self._manager = None - self._force_close = True - self._request_factory = None - self._request_handler = None - self._request_parser = None - - if self._keepalive_handle is not None: - self._keepalive_handle.cancel() - - if self._current_request is not None: - if exc is None: - exc = ConnectionResetError("Connection lost") - self._current_request._cancel(exc) - - if self._waiter is not None: - self._waiter.cancel() - - if handler_cancellation and self._task_handler is not None: - self._task_handler.cancel() - - self._task_handler = None - - if self._payload_parser is not None: - self._payload_parser.feed_eof() - self._payload_parser = None - - def set_parser(self, parser: Any) -> None: - # Actual type is WebReader - assert self._payload_parser is None - - self._payload_parser = parser - - if self._message_tail: - self._payload_parser.feed_data(self._message_tail) - self._message_tail = b"" - - def eof_received(self) -> None: - pass - - def data_received(self, data: bytes) -> None: - if self._force_close or self._close: - return - # parse http messages - messages: Sequence[_MsgType] - if self._payload_parser is None and not self._upgrade: - assert self._request_parser is not None - try: - messages, upgraded, tail = self._request_parser.feed_data(data) - except HttpProcessingError as exc: - messages = [ - (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) - ] - upgraded = False - tail = b"" - - for msg, payload in messages or (): - self._request_count += 1 - self._messages.append((msg, payload)) - - waiter = self._waiter - if messages and waiter is not None and not waiter.done(): - # don't set result twice - waiter.set_result(None) - - self._upgrade = upgraded - if upgraded and tail: - self._message_tail = 
tail - - # no parser, just store - elif self._payload_parser is None and self._upgrade and data: - self._message_tail += data - - # feed payload - elif data: - eof, tail = self._payload_parser.feed_data(data) - if eof: - self.close() - - def keep_alive(self, val: bool) -> None: - """Set keep-alive connection mode. - - :param bool val: new state. - """ - self._keepalive = val - if self._keepalive_handle: - self._keepalive_handle.cancel() - self._keepalive_handle = None - - def close(self) -> None: - """Close connection. - - Stop accepting new pipelining messages and close - connection when handlers done processing messages. - """ - self._close = True - if self._waiter: - self._waiter.cancel() - - def force_close(self) -> None: - """Forcefully close connection.""" - self._force_close = True - if self._waiter: - self._waiter.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None - - def log_access( - self, request: BaseRequest, response: StreamResponse, time: float - ) -> None: - if self.access_logger is not None: - self.access_logger.log(request, response, self._loop.time() - time) - - def log_debug(self, *args: Any, **kw: Any) -> None: - if self.debug: - self.logger.debug(*args, **kw) - - def log_exception(self, *args: Any, **kw: Any) -> None: - self.logger.exception(*args, **kw) - - def _process_keepalive(self) -> None: - if self._force_close or not self._keepalive: - return - - next = self._keepalive_time + self._keepalive_timeout - - # handler in idle state - if self._waiter: - if self._loop.time() > next: - self.force_close() - return - - # not all request handlers are done, - # reschedule itself to next second - self._keepalive_handle = self._loop.call_later( - self.KEEPALIVE_RESCHEDULE_DELAY, - self._process_keepalive, - ) - - async def _handle_request( - self, - request: BaseRequest, - start_time: float, - request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], - ) -> Tuple[StreamResponse, bool]: - assert self._request_handler is not None - try: - try: - self._current_request = request - resp = await request_handler(request) - finally: - self._current_request = None - except HTTPException as exc: - resp = exc - reset = await self.finish_response(request, resp, start_time) - except asyncio.CancelledError: - raise - except asyncio.TimeoutError as exc: - self.log_debug("Request handler timed out.", exc_info=exc) - resp = self.handle_error(request, 504) - reset = await self.finish_response(request, resp, start_time) - except Exception as exc: - resp = self.handle_error(request, 500, exc) - reset = await self.finish_response(request, resp, start_time) - else: - # Deprecation warning (See #2415) - if getattr(resp, "__http_exception__", False): - warnings.warn( - "returning HTTPException object is deprecated " - "(#2415) and will be removed, " - "please raise the exception instead", - DeprecationWarning, - ) - - reset = await self.finish_response(request, resp, start_time) - - return resp, reset - - async def start(self) -> None: - """Process incoming request. - - It reads request line, request headers and request payload, then - calls handle_request() method. Subclass has to override - handle_request(). start() handles various exceptions in request - or response handling. Connection is being closed always unless - keep_alive(True) specified. 
- """ - loop = self._loop - handler = self._task_handler - assert handler is not None - manager = self._manager - assert manager is not None - keepalive_timeout = self._keepalive_timeout - resp = None - assert self._request_factory is not None - assert self._request_handler is not None - - while not self._force_close: - if not self._messages: - try: - # wait for next request - self._waiter = loop.create_future() - await self._waiter - except asyncio.CancelledError: - break - finally: - self._waiter = None - - message, payload = self._messages.popleft() - - start = loop.time() - - manager.requests_count += 1 - writer = StreamWriter(self, loop) - if isinstance(message, _ErrInfo): - # make request_factory work - request_handler = self._make_error_handler(message) - message = ERROR - else: - request_handler = self._request_handler - - request = self._request_factory(message, payload, self, writer, handler) - try: - # a new task is used for copy context vars (#3406) - task = self._loop.create_task( - self._handle_request(request, start, request_handler) - ) - try: - resp, reset = await task - except (asyncio.CancelledError, ConnectionError): - self.log_debug("Ignored premature client disconnection") - break - - # Drop the processed task from asyncio.Task.all_tasks() early - del task - if reset: - self.log_debug("Ignored premature client disconnection 2") - break - - # notify server about keep-alive - self._keepalive = bool(resp.keep_alive) - - # check payload - if not payload.is_eof(): - lingering_time = self._lingering_time - if not self._force_close and lingering_time: - self.log_debug( - "Start lingering close timer for %s sec.", lingering_time - ) - - now = loop.time() - end_t = now + lingering_time - - with suppress(asyncio.TimeoutError, asyncio.CancelledError): - while not payload.is_eof() and now < end_t: - async with ceil_timeout(end_t - now): - # read and ignore - await payload.readany() - now = loop.time() - - # if payload still uncompleted - if not payload.is_eof() and not self._force_close: - self.log_debug("Uncompleted request.") - self.close() - - payload.set_exception(PayloadAccessError()) - - except asyncio.CancelledError: - self.log_debug("Ignored premature client disconnection ") - break - except RuntimeError as exc: - if self.debug: - self.log_exception("Unhandled runtime exception", exc_info=exc) - self.force_close() - except Exception as exc: - self.log_exception("Unhandled exception", exc_info=exc) - self.force_close() - finally: - if self.transport is None and resp is not None: - self.log_debug("Ignored premature client disconnection.") - elif not self._force_close: - if self._keepalive and not self._close: - # start keep-alive timer - if keepalive_timeout is not None: - now = self._loop.time() - self._keepalive_time = now - if self._keepalive_handle is None: - self._keepalive_handle = loop.call_at( - now + keepalive_timeout, self._process_keepalive - ) - else: - break - - # remove handler, close transport if no handlers left - if not self._force_close: - self._task_handler = None - if self.transport is not None: - self.transport.close() - - async def finish_response( - self, request: BaseRequest, resp: StreamResponse, start_time: float - ) -> bool: - """Prepare the response and write_eof, then log access. - - This has to - be called within the context of any exception so the access logger - can get exception information. Returns True if the client disconnects - prematurely. 
- """ - if self._request_parser is not None: - self._request_parser.set_upgraded(False) - self._upgrade = False - if self._message_tail: - self._request_parser.feed_data(self._message_tail) - self._message_tail = b"" - try: - prepare_meth = resp.prepare - except AttributeError: - if resp is None: - raise RuntimeError("Missing return " "statement on request handler") - else: - raise RuntimeError( - "Web-handler should return " - "a response instance, " - "got {!r}".format(resp) - ) - try: - await prepare_meth(request) - await resp.write_eof() - except ConnectionError: - self.log_access(request, resp, start_time) - return True - else: - self.log_access(request, resp, start_time) - return False - - def handle_error( - self, - request: BaseRequest, - status: int = 500, - exc: Optional[BaseException] = None, - message: Optional[str] = None, - ) -> StreamResponse: - """Handle errors. - - Returns HTTP response with specific status code. Logs additional - information. It always closes current connection. - """ - self.log_exception("Error handling request", exc_info=exc) - - # some data already got sent, connection is broken - if request.writer.output_size > 0: - raise ConnectionError( - "Response is sent already, cannot send another response " - "with the error message" - ) - - ct = "text/plain" - if status == HTTPStatus.INTERNAL_SERVER_ERROR: - title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) - msg = HTTPStatus.INTERNAL_SERVER_ERROR.description - tb = None - if self.debug: - with suppress(Exception): - tb = traceback.format_exc() - - if "text/html" in request.headers.get("Accept", ""): - if tb: - tb = html_escape(tb) - msg = f"

<h2>Traceback:</h2>\n<pre>{tb}</pre>" - message = ( - "<html><head>" - "<title>{title}</title>" - "</head><body>\n<h1>{title}</h1>
" - "\n{msg}\n\n" - ).format(title=title, msg=msg) - ct = "text/html" - else: - if tb: - msg = tb - message = title + "\n\n" + msg - - resp = Response(status=status, text=message, content_type=ct) - resp.force_close() - - return resp - - def _make_error_handler( - self, err_info: _ErrInfo - ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: - async def handler(request: BaseRequest) -> StreamResponse: - return self.handle_error( - request, err_info.status, err_info.exc, err_info.message - ) - - return handler diff --git a/.venv/Lib/site-packages/aiohttp/web_request.py b/.venv/Lib/site-packages/aiohttp/web_request.py deleted file mode 100644 index 61fc831..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_request.py +++ /dev/null @@ -1,898 +0,0 @@ -import asyncio -import datetime -import io -import re -import socket -import string -import tempfile -import types -import warnings -from http.cookies import SimpleCookie -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Final, - Iterator, - Mapping, - MutableMapping, - Optional, - Pattern, - Tuple, - Union, - cast, -) -from urllib.parse import parse_qsl - -import attr -from multidict import ( - CIMultiDict, - CIMultiDictProxy, - MultiDict, - MultiDictProxy, - MultiMapping, -) -from yarl import URL - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import ( - _SENTINEL, - DEBUG, - ETAG_ANY, - LIST_QUOTED_ETAG_RE, - ChainMapProxy, - ETag, - HeadersMixin, - parse_http_date, - reify, - sentinel, -) -from .http_parser import RawRequestMessage -from .http_writer import HttpVersion -from .multipart import BodyPartReader, MultipartReader -from .streams import EmptyStreamReader, StreamReader -from .typedefs import ( - DEFAULT_JSON_DECODER, - JSONDecoder, - LooseHeaders, - RawHeaders, - StrOrURL, -) -from .web_exceptions import HTTPRequestEntityTooLarge -from .web_response import StreamResponse - -__all__ = ("BaseRequest", "FileField", "Request") - - -if TYPE_CHECKING: - from .web_app import Application - from .web_protocol import RequestHandler - from .web_urldispatcher import UrlMappingMatchInfo - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class FileField: - name: str - filename: str - file: io.BufferedReader - content_type: str - headers: "CIMultiDictProxy[str]" - - -_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" -# '-' at the end to prevent interpretation as range in a char class - -_TOKEN: Final[str] = rf"[{_TCHAR}]+" - -_QDTEXT: Final[str] = r"[{}]".format( - r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) -) -# qdtext includes 0x5C to escape 0x5D ('\]') -# qdtext excludes obs-text (because obsoleted, and encoding not specified) - -_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" - -_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( - qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR -) - -_FORWARDED_PAIR: Final[ - str -] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( - token=_TOKEN, quoted_string=_QUOTED_STRING -) - -_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") -# same pattern as _QUOTED_PAIR but contains a capture group - -_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) - -############################################################ -# HTTP Request -############################################################ - - -class BaseRequest(MutableMapping[str, Any], HeadersMixin): - - POST_METHODS = { - hdrs.METH_PATCH, - hdrs.METH_POST, - 
hdrs.METH_PUT, - hdrs.METH_TRACE, - hdrs.METH_DELETE, - } - - ATTRS = HeadersMixin.ATTRS | frozenset( - [ - "_message", - "_protocol", - "_payload_writer", - "_payload", - "_headers", - "_method", - "_version", - "_rel_url", - "_post", - "_read_bytes", - "_state", - "_cache", - "_task", - "_client_max_size", - "_loop", - "_transport_sslcontext", - "_transport_peername", - ] - ) - - def __init__( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: "RequestHandler", - payload_writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - loop: asyncio.AbstractEventLoop, - *, - client_max_size: int = 1024**2, - state: Optional[Dict[str, Any]] = None, - scheme: Optional[str] = None, - host: Optional[str] = None, - remote: Optional[str] = None, - ) -> None: - if state is None: - state = {} - self._message = message - self._protocol = protocol - self._payload_writer = payload_writer - - self._payload = payload - self._headers = message.headers - self._method = message.method - self._version = message.version - self._cache: Dict[str, Any] = {} - url = message.url - if url.is_absolute(): - # absolute URL is given, - # override auto-calculating url, host, and scheme - # all other properties should be good - self._cache["url"] = url - self._cache["host"] = url.host - self._cache["scheme"] = url.scheme - self._rel_url = url.relative() - else: - self._rel_url = message.url - self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None - self._read_bytes: Optional[bytes] = None - - self._state = state - self._task = task - self._client_max_size = client_max_size - self._loop = loop - - transport = self._protocol.transport - assert transport is not None - self._transport_sslcontext = transport.get_extra_info("sslcontext") - self._transport_peername = transport.get_extra_info("peername") - - if scheme is not None: - self._cache["scheme"] = scheme - if host is not None: - self._cache["host"] = host - if remote is not None: - self._cache["remote"] = remote - - def clone( - self, - *, - method: Union[str, _SENTINEL] = sentinel, - rel_url: Union[StrOrURL, _SENTINEL] = sentinel, - headers: Union[LooseHeaders, _SENTINEL] = sentinel, - scheme: Union[str, _SENTINEL] = sentinel, - host: Union[str, _SENTINEL] = sentinel, - remote: Union[str, _SENTINEL] = sentinel, - client_max_size: Union[int, _SENTINEL] = sentinel, - ) -> "BaseRequest": - """Clone itself with replacement some attributes. - - Creates and returns a new instance of Request object. If no parameters - are given, an exact copy is returned. If a parameter is not passed, it - will reuse the one from the current request object. 
- """ - if self._read_bytes: - raise RuntimeError("Cannot clone request " "after reading its content") - - dct: Dict[str, Any] = {} - if method is not sentinel: - dct["method"] = method - if rel_url is not sentinel: - new_url: URL = URL(rel_url) - dct["url"] = new_url - dct["path"] = str(new_url) - if headers is not sentinel: - # a copy semantic - dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) - dct["raw_headers"] = tuple( - (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() - ) - - message = self._message._replace(**dct) - - kwargs = {} - if scheme is not sentinel: - kwargs["scheme"] = scheme - if host is not sentinel: - kwargs["host"] = host - if remote is not sentinel: - kwargs["remote"] = remote - if client_max_size is sentinel: - client_max_size = self._client_max_size - - return self.__class__( - message, - self._payload, - self._protocol, - self._payload_writer, - self._task, - self._loop, - client_max_size=client_max_size, - state=self._state.copy(), - **kwargs, - ) - - @property - def task(self) -> "asyncio.Task[None]": - return self._task - - @property - def protocol(self) -> "RequestHandler": - return self._protocol - - @property - def transport(self) -> Optional[asyncio.Transport]: - if self._protocol is None: - return None - return self._protocol.transport - - @property - def writer(self) -> AbstractStreamWriter: - return self._payload_writer - - @property - def client_max_size(self) -> int: - return self._client_max_size - - @reify - def message(self) -> RawRequestMessage: - warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) - return self._message - - @reify - def rel_url(self) -> URL: - return self._rel_url - - @reify - def loop(self) -> asyncio.AbstractEventLoop: - warnings.warn( - "request.loop property is deprecated", DeprecationWarning, stacklevel=2 - ) - return self._loop - - # MutableMapping API - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def __setitem__(self, key: str, value: Any) -> None: - self._state[key] = value - - def __delitem__(self, key: str) -> None: - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - ######## - - @reify - def secure(self) -> bool: - """A bool indicating if the request is handled with SSL.""" - return self.scheme == "https" - - @reify - def forwarded(self) -> Tuple[Mapping[str, str], ...]: - """A tuple containing all parsed Forwarded header(s). - - Makes an effort to parse Forwarded headers as specified by RFC 7239: - - - It adds one (immutable) dictionary per Forwarded 'field-value', ie - per proxy. The element corresponds to the data in the Forwarded - field-value added by the first proxy encountered by the client. Each - subsequent item corresponds to those added by later proxies. - - It checks that every value has valid syntax in general as specified - in section 4: either a 'token' or a 'quoted-string'. - - It un-escapes found escape sequences. - - It does NOT validate 'by' and 'for' contents as specified in section - 6. - - It does NOT validate 'host' contents (Host ABNF). - - It does NOT validate 'proto' contents for valid URI scheme names. 
- - Returns a tuple containing one or more immutable dicts - """ - elems = [] - for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): - length = len(field_value) - pos = 0 - need_separator = False - elem: Dict[str, str] = {} - elems.append(types.MappingProxyType(elem)) - while 0 <= pos < length: - match = _FORWARDED_PAIR_RE.match(field_value, pos) - if match is not None: # got a valid forwarded-pair - if need_separator: - # bad syntax here, skip to next comma - pos = field_value.find(",", pos) - else: - name, value, port = match.groups() - if value[0] == '"': - # quoted string: remove quotes and unescape - value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) - if port: - value += port - elem[name.lower()] = value - pos += len(match.group(0)) - need_separator = True - elif field_value[pos] == ",": # next forwarded-element - need_separator = False - elem = {} - elems.append(types.MappingProxyType(elem)) - pos += 1 - elif field_value[pos] == ";": # next forwarded-pair - need_separator = False - pos += 1 - elif field_value[pos] in " \t": - # Allow whitespace even between forwarded-pairs, though - # RFC 7239 doesn't. This simplifies code and is in line - # with Postel's law. - pos += 1 - else: - # bad syntax here, skip to next comma - pos = field_value.find(",", pos) - return tuple(elems) - - @reify - def scheme(self) -> str: - """A string representing the scheme of the request. - - Hostname is resolved in this order: - - - overridden value by .clone(scheme=new_scheme) call. - - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. - - 'http' or 'https'. - """ - if self._transport_sslcontext: - return "https" - else: - return "http" - - @reify - def method(self) -> str: - """Read only property for getting HTTP method. - - The value is upper-cased str like 'GET', 'POST', 'PUT' etc. - """ - return self._method - - @reify - def version(self) -> HttpVersion: - """Read only property for getting HTTP version of request. - - Returns aiohttp.protocol.HttpVersion instance. - """ - return self._version - - @reify - def host(self) -> str: - """Hostname of the request. - - Hostname is resolved in this order: - - - overridden value by .clone(host=new_host) call. - - HOST HTTP header - - socket.getfqdn() value - """ - host = self._message.headers.get(hdrs.HOST) - if host is not None: - return host - return socket.getfqdn() - - @reify - def remote(self) -> Optional[str]: - """Remote IP of client initiated HTTP request. - - The IP is resolved in this order: - - - overridden value by .clone(remote=new_remote) call. - - peername of opened socket - """ - if self._transport_peername is None: - return None - if isinstance(self._transport_peername, (list, tuple)): - return str(self._transport_peername[0]) - return str(self._transport_peername) - - @reify - def url(self) -> URL: - url = URL.build(scheme=self.scheme, host=self.host) - return url.join(self._rel_url) - - @reify - def path(self) -> str: - """The URL including *PATH INFO* without the host or scheme. - - E.g., ``/app/blog`` - """ - return self._rel_url.path - - @reify - def path_qs(self) -> str: - """The URL including PATH_INFO and the query string. - - E.g, /app/blog?id=10 - """ - return str(self._rel_url) - - @reify - def raw_path(self) -> str: - """The URL including raw *PATH INFO* without the host or scheme. 
- - Warning, the path is unquoted and may contains non valid URL characters - - E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` - """ - return self._message.path - - @reify - def query(self) -> "MultiMapping[str]": - """A multidict with all the variables in the query string.""" - return MultiDictProxy(self._rel_url.query) - - @reify - def query_string(self) -> str: - """The query string in the URL. - - E.g., id=10 - """ - return self._rel_url.query_string - - @reify - def headers(self) -> "MultiMapping[str]": - """A case-insensitive multidict proxy with all headers.""" - return self._headers - - @reify - def raw_headers(self) -> RawHeaders: - """A sequence of pairs for all headers.""" - return self._message.raw_headers - - @reify - def if_modified_since(self) -> Optional[datetime.datetime]: - """The value of If-Modified-Since HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) - - @reify - def if_unmodified_since(self) -> Optional[datetime.datetime]: - """The value of If-Unmodified-Since HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) - - @staticmethod - def _etag_values(etag_header: str) -> Iterator[ETag]: - """Extract `ETag` objects from raw header.""" - if etag_header == ETAG_ANY: - yield ETag( - is_weak=False, - value=ETAG_ANY, - ) - else: - for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): - is_weak, value, garbage = match.group(2, 3, 4) - # Any symbol captured by 4th group means - # that the following sequence is invalid. - if garbage: - break - - yield ETag( - is_weak=bool(is_weak), - value=value, - ) - - @classmethod - def _if_match_or_none_impl( - cls, header_value: Optional[str] - ) -> Optional[Tuple[ETag, ...]]: - if not header_value: - return None - - return tuple(cls._etag_values(header_value)) - - @reify - def if_match(self) -> Optional[Tuple[ETag, ...]]: - """The value of If-Match HTTP header, or None. - - This header is represented as a `tuple` of `ETag` objects. - """ - return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) - - @reify - def if_none_match(self) -> Optional[Tuple[ETag, ...]]: - """The value of If-None-Match HTTP header, or None. - - This header is represented as a `tuple` of `ETag` objects. - """ - return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) - - @reify - def if_range(self) -> Optional[datetime.datetime]: - """The value of If-Range HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self.headers.get(hdrs.IF_RANGE)) - - @reify - def keep_alive(self) -> bool: - """Is keepalive enabled by client?""" - return not self._message.should_close - - @reify - def cookies(self) -> Mapping[str, str]: - """Return request cookies. - - A read-only dictionary-like object. - """ - raw = self.headers.get(hdrs.COOKIE, "") - parsed = SimpleCookie(raw) - return MappingProxyType({key: val.value for key, val in parsed.items()}) - - @reify - def http_range(self) -> slice: - """The content of Range HTTP header. - - Return a slice instance. 
- - """ - rng = self._headers.get(hdrs.RANGE) - start, end = None, None - if rng is not None: - try: - pattern = r"^bytes=(\d*)-(\d*)$" - start, end = re.findall(pattern, rng)[0] - except IndexError: # pattern was not found in header - raise ValueError("range not in acceptable format") - - end = int(end) if end else None - start = int(start) if start else None - - if start is None and end is not None: - # end with no start is to return tail of content - start = -end - end = None - - if start is not None and end is not None: - # end is inclusive in range header, exclusive for slice - end += 1 - - if start >= end: - raise ValueError("start cannot be after end") - - if start is end is None: # No valid range supplied - raise ValueError("No start or end of range specified") - - return slice(start, end, 1) - - @reify - def content(self) -> StreamReader: - """Return raw payload stream.""" - return self._payload - - @property - def has_body(self) -> bool: - """Return True if request's HTTP BODY can be read, False otherwise.""" - warnings.warn( - "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 - ) - return not self._payload.at_eof() - - @property - def can_read_body(self) -> bool: - """Return True if request's HTTP BODY can be read, False otherwise.""" - return not self._payload.at_eof() - - @reify - def body_exists(self) -> bool: - """Return True if request has HTTP BODY, False otherwise.""" - return type(self._payload) is not EmptyStreamReader - - async def release(self) -> None: - """Release request. - - Eat unread part of HTTP BODY if present. - """ - while not self._payload.at_eof(): - await self._payload.readany() - - async def read(self) -> bytes: - """Read request body if present. - - Returns bytes object with full request content. 
- """ - if self._read_bytes is None: - body = bytearray() - while True: - chunk = await self._payload.readany() - body.extend(chunk) - if self._client_max_size: - body_size = len(body) - if body_size >= self._client_max_size: - raise HTTPRequestEntityTooLarge( - max_size=self._client_max_size, actual_size=body_size - ) - if not chunk: - break - self._read_bytes = bytes(body) - return self._read_bytes - - async def text(self) -> str: - """Return BODY as text using encoding from .charset.""" - bytes_body = await self.read() - encoding = self.charset or "utf-8" - return bytes_body.decode(encoding) - - async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: - """Return BODY as JSON.""" - body = await self.text() - return loads(body) - - async def multipart(self) -> MultipartReader: - """Return async iterator to process BODY as multipart.""" - return MultipartReader(self._headers, self._payload) - - async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": - """Return POST parameters.""" - if self._post is not None: - return self._post - if self._method not in self.POST_METHODS: - self._post = MultiDictProxy(MultiDict()) - return self._post - - content_type = self.content_type - if content_type not in ( - "", - "application/x-www-form-urlencoded", - "multipart/form-data", - ): - self._post = MultiDictProxy(MultiDict()) - return self._post - - out: MultiDict[Union[str, bytes, FileField]] = MultiDict() - - if content_type == "multipart/form-data": - multipart = await self.multipart() - max_size = self._client_max_size - - field = await multipart.next() - while field is not None: - size = 0 - field_ct = field.headers.get(hdrs.CONTENT_TYPE) - - if isinstance(field, BodyPartReader): - assert field.name is not None - - # Note that according to RFC 7578, the Content-Type header - # is optional, even for files, so we can't assume it's - # present. 
- # https://tools.ietf.org/html/rfc7578#section-4.4 - if field.filename: - # store file in temp file - tmp = tempfile.TemporaryFile() - chunk = await field.read_chunk(size=2**16) - while chunk: - chunk = field.decode(chunk) - tmp.write(chunk) - size += len(chunk) - if 0 < max_size < size: - tmp.close() - raise HTTPRequestEntityTooLarge( - max_size=max_size, actual_size=size - ) - chunk = await field.read_chunk(size=2**16) - tmp.seek(0) - - if field_ct is None: - field_ct = "application/octet-stream" - - ff = FileField( - field.name, - field.filename, - cast(io.BufferedReader, tmp), - field_ct, - field.headers, - ) - out.add(field.name, ff) - else: - # deal with ordinary data - value = await field.read(decode=True) - if field_ct is None or field_ct.startswith("text/"): - charset = field.get_charset(default="utf-8") - out.add(field.name, value.decode(charset)) - else: - out.add(field.name, value) - size += len(value) - if 0 < max_size < size: - raise HTTPRequestEntityTooLarge( - max_size=max_size, actual_size=size - ) - else: - raise ValueError( - "To decode nested multipart you need " "to use custom reader", - ) - - field = await multipart.next() - else: - data = await self.read() - if data: - charset = self.charset or "utf-8" - out.extend( - parse_qsl( - data.rstrip().decode(charset), - keep_blank_values=True, - encoding=charset, - ) - ) - - self._post = MultiDictProxy(out) - return self._post - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """Extra info from protocol transport""" - protocol = self._protocol - if protocol is None: - return default - - transport = protocol.transport - if transport is None: - return default - - return transport.get_extra_info(name, default) - - def __repr__(self) -> str: - ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( - "ascii" - ) - return "<{} {} {} >".format( - self.__class__.__name__, self._method, ascii_encodable_path - ) - - def __eq__(self, other: object) -> bool: - return id(self) == id(other) - - def __bool__(self) -> bool: - return True - - async def _prepare_hook(self, response: StreamResponse) -> None: - return - - def _cancel(self, exc: BaseException) -> None: - self._payload.set_exception(exc) - - -class Request(BaseRequest): - - ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - # matchdict, route_name, handler - # or information about traversal lookup - - # initialized after route resolving - self._match_info: Optional[UrlMappingMatchInfo] = None - - if DEBUG: - - def __setattr__(self, name: str, val: Any) -> None: - if name not in self.ATTRS: - warnings.warn( - "Setting custom {}.{} attribute " - "is discouraged".format(self.__class__.__name__, name), - DeprecationWarning, - stacklevel=2, - ) - super().__setattr__(name, val) - - def clone( - self, - *, - method: Union[str, _SENTINEL] = sentinel, - rel_url: Union[StrOrURL, _SENTINEL] = sentinel, - headers: Union[LooseHeaders, _SENTINEL] = sentinel, - scheme: Union[str, _SENTINEL] = sentinel, - host: Union[str, _SENTINEL] = sentinel, - remote: Union[str, _SENTINEL] = sentinel, - client_max_size: Union[int, _SENTINEL] = sentinel, - ) -> "Request": - ret = super().clone( - method=method, - rel_url=rel_url, - headers=headers, - scheme=scheme, - host=host, - remote=remote, - client_max_size=client_max_size, - ) - new_ret = cast(Request, ret) - new_ret._match_info = self._match_info - return new_ret - - @reify - def match_info(self) -> 
"UrlMappingMatchInfo": - """Result of route resolving.""" - match_info = self._match_info - assert match_info is not None - return match_info - - @property - def app(self) -> "Application": - """Application instance.""" - match_info = self._match_info - assert match_info is not None - return match_info.current_app - - @property - def config_dict(self) -> ChainMapProxy: - match_info = self._match_info - assert match_info is not None - lst = match_info.apps - app = self.app - idx = lst.index(app) - sublist = list(reversed(lst[: idx + 1])) - return ChainMapProxy(sublist) - - async def _prepare_hook(self, response: StreamResponse) -> None: - match_info = self._match_info - if match_info is None: - return - for app in match_info._apps: - await app.on_response_prepare.send(self, response) diff --git a/.venv/Lib/site-packages/aiohttp/web_response.py b/.venv/Lib/site-packages/aiohttp/web_response.py deleted file mode 100644 index b6a4ba9..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_response.py +++ /dev/null @@ -1,817 +0,0 @@ -import asyncio -import collections.abc -import datetime -import enum -import json -import math -import time -import warnings -from concurrent.futures import Executor -from http import HTTPStatus -from http.cookies import SimpleCookie -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - MutableMapping, - Optional, - Union, - cast, -) - -from multidict import CIMultiDict, istr - -from . import hdrs, payload -from .abc import AbstractStreamWriter -from .compression_utils import ZLibCompressor -from .helpers import ( - ETAG_ANY, - QUOTED_ETAG_RE, - ETag, - HeadersMixin, - must_be_empty_body, - parse_http_date, - rfc822_formatted_time, - sentinel, - should_remove_content_length, - validate_etag_value, -) -from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11 -from .payload import Payload -from .typedefs import JSONEncoder, LooseHeaders - -__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") - - -if TYPE_CHECKING: - from .web_request import BaseRequest - - BaseClass = MutableMapping[str, Any] -else: - BaseClass = collections.abc.MutableMapping - - -class ContentCoding(enum.Enum): - # The content codings that we have support for. 
- # - # Additional registered codings are listed at: - # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding - deflate = "deflate" - gzip = "gzip" - identity = "identity" - - -############################################################ -# HTTP Response classes -############################################################ - - -class StreamResponse(BaseClass, HeadersMixin): - - _length_check = True - - def __init__( - self, - *, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - ) -> None: - self._body = None - self._keep_alive: Optional[bool] = None - self._chunked = False - self._compression = False - self._compression_force: Optional[ContentCoding] = None - self._cookies = SimpleCookie() - - self._req: Optional[BaseRequest] = None - self._payload_writer: Optional[AbstractStreamWriter] = None - self._eof_sent = False - self._must_be_empty_body: Optional[bool] = None - self._body_length = 0 - self._state: Dict[str, Any] = {} - - if headers is not None: - self._headers: CIMultiDict[str] = CIMultiDict(headers) - else: - self._headers = CIMultiDict() - - self.set_status(status, reason) - - @property - def prepared(self) -> bool: - return self._payload_writer is not None - - @property - def task(self) -> "Optional[asyncio.Task[None]]": - if self._req: - return self._req.task - else: - return None - - @property - def status(self) -> int: - return self._status - - @property - def chunked(self) -> bool: - return self._chunked - - @property - def compression(self) -> bool: - return self._compression - - @property - def reason(self) -> str: - return self._reason - - def set_status( - self, - status: int, - reason: Optional[str] = None, - ) -> None: - assert not self.prepared, ( - "Cannot change the response status code after " "the headers have been sent" - ) - self._status = int(status) - if reason is None: - try: - reason = HTTPStatus(self._status).phrase - except ValueError: - reason = "" - self._reason = reason - - @property - def keep_alive(self) -> Optional[bool]: - return self._keep_alive - - def force_close(self) -> None: - self._keep_alive = False - - @property - def body_length(self) -> int: - return self._body_length - - @property - def output_length(self) -> int: - warnings.warn("output_length is deprecated", DeprecationWarning) - assert self._payload_writer - return self._payload_writer.buffer_size - - def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: - """Enables automatic chunked transfer encoding.""" - self._chunked = True - - if hdrs.CONTENT_LENGTH in self._headers: - raise RuntimeError( - "You can't enable chunked encoding when " "a content length is set" - ) - if chunk_size is not None: - warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) - - def enable_compression( - self, force: Optional[Union[bool, ContentCoding]] = None - ) -> None: - """Enables response compression encoding.""" - # Backwards compatibility for when force was a bool <0.17. 
- if type(force) == bool: - force = ContentCoding.deflate if force else ContentCoding.identity - warnings.warn( - "Using boolean for force is deprecated #3318", DeprecationWarning - ) - elif force is not None: - assert isinstance(force, ContentCoding), ( - "force should one of " "None, bool or " "ContentEncoding" - ) - - self._compression = True - self._compression_force = force - - @property - def headers(self) -> "CIMultiDict[str]": - return self._headers - - @property - def cookies(self) -> SimpleCookie: - return self._cookies - - def set_cookie( - self, - name: str, - value: str, - *, - expires: Optional[str] = None, - domain: Optional[str] = None, - max_age: Optional[Union[int, str]] = None, - path: str = "/", - secure: Optional[bool] = None, - httponly: Optional[bool] = None, - version: Optional[str] = None, - samesite: Optional[str] = None, - ) -> None: - """Set or update response cookie. - - Sets new cookie or updates existent with new value. - Also updates only those params which are not None. - """ - old = self._cookies.get(name) - if old is not None and old.coded_value == "": - # deleted cookie - self._cookies.pop(name, None) - - self._cookies[name] = value - c = self._cookies[name] - - if expires is not None: - c["expires"] = expires - elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": - del c["expires"] - - if domain is not None: - c["domain"] = domain - - if max_age is not None: - c["max-age"] = str(max_age) - elif "max-age" in c: - del c["max-age"] - - c["path"] = path - - if secure is not None: - c["secure"] = secure - if httponly is not None: - c["httponly"] = httponly - if version is not None: - c["version"] = version - if samesite is not None: - c["samesite"] = samesite - - def del_cookie( - self, name: str, *, domain: Optional[str] = None, path: str = "/" - ) -> None: - """Delete cookie. - - Creates new empty expired cookie. - """ - # TODO: do we need domain/path here? 
- self._cookies.pop(name, None) - self.set_cookie( - name, - "", - max_age=0, - expires="Thu, 01 Jan 1970 00:00:00 GMT", - domain=domain, - path=path, - ) - - @property - def content_length(self) -> Optional[int]: - # Just a placeholder for adding setter - return super().content_length - - @content_length.setter - def content_length(self, value: Optional[int]) -> None: - if value is not None: - value = int(value) - if self._chunked: - raise RuntimeError( - "You can't set content length when " "chunked encoding is enable" - ) - self._headers[hdrs.CONTENT_LENGTH] = str(value) - else: - self._headers.pop(hdrs.CONTENT_LENGTH, None) - - @property - def content_type(self) -> str: - # Just a placeholder for adding setter - return super().content_type - - @content_type.setter - def content_type(self, value: str) -> None: - self.content_type # read header values if needed - self._content_type = str(value) - self._generate_content_type_header() - - @property - def charset(self) -> Optional[str]: - # Just a placeholder for adding setter - return super().charset - - @charset.setter - def charset(self, value: Optional[str]) -> None: - ctype = self.content_type # read header values if needed - if ctype == "application/octet-stream": - raise RuntimeError( - "Setting charset for application/octet-stream " - "doesn't make sense, setup content_type first" - ) - assert self._content_dict is not None - if value is None: - self._content_dict.pop("charset", None) - else: - self._content_dict["charset"] = str(value).lower() - self._generate_content_type_header() - - @property - def last_modified(self) -> Optional[datetime.datetime]: - """The value of Last-Modified HTTP header, or None. - - This header is represented as a `datetime` object. - """ - return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED)) - - @last_modified.setter - def last_modified( - self, value: Optional[Union[int, float, datetime.datetime, str]] - ) -> None: - if value is None: - self._headers.pop(hdrs.LAST_MODIFIED, None) - elif isinstance(value, (int, float)): - self._headers[hdrs.LAST_MODIFIED] = time.strftime( - "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) - ) - elif isinstance(value, datetime.datetime): - self._headers[hdrs.LAST_MODIFIED] = time.strftime( - "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() - ) - elif isinstance(value, str): - self._headers[hdrs.LAST_MODIFIED] = value - - @property - def etag(self) -> Optional[ETag]: - quoted_value = self._headers.get(hdrs.ETAG) - if not quoted_value: - return None - elif quoted_value == ETAG_ANY: - return ETag(value=ETAG_ANY) - match = QUOTED_ETAG_RE.fullmatch(quoted_value) - if not match: - return None - is_weak, value = match.group(1, 2) - return ETag( - is_weak=bool(is_weak), - value=value, - ) - - @etag.setter - def etag(self, value: Optional[Union[ETag, str]]) -> None: - if value is None: - self._headers.pop(hdrs.ETAG, None) - elif (isinstance(value, str) and value == ETAG_ANY) or ( - isinstance(value, ETag) and value.value == ETAG_ANY - ): - self._headers[hdrs.ETAG] = ETAG_ANY - elif isinstance(value, str): - validate_etag_value(value) - self._headers[hdrs.ETAG] = f'"{value}"' - elif isinstance(value, ETag) and isinstance(value.value, str): - validate_etag_value(value.value) - hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"' - self._headers[hdrs.ETAG] = hdr_value - else: - raise ValueError( - f"Unsupported etag type: {type(value)}. 
" - f"etag must be str, ETag or None" - ) - - def _generate_content_type_header( - self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE - ) -> None: - assert self._content_dict is not None - assert self._content_type is not None - params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) - if params: - ctype = self._content_type + "; " + params - else: - ctype = self._content_type - self._headers[CONTENT_TYPE] = ctype - - async def _do_start_compression(self, coding: ContentCoding) -> None: - if coding != ContentCoding.identity: - assert self._payload_writer is not None - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._payload_writer.enable_compression(coding.value) - # Compressed payload may have different content length, - # remove the header - self._headers.popall(hdrs.CONTENT_LENGTH, None) - - async def _start_compression(self, request: "BaseRequest") -> None: - if self._compression_force: - await self._do_start_compression(self._compression_force) - else: - accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - for coding in ContentCoding: - if coding.value in accept_encoding: - await self._do_start_compression(coding) - return - - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: - if self._eof_sent: - return None - if self._payload_writer is not None: - return self._payload_writer - self._must_be_empty_body = must_be_empty_body(request.method, self.status) - return await self._start(request) - - async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - self._req = request - writer = self._payload_writer = request._payload_writer - - await self._prepare_headers() - await request._prepare_hook(self) - await self._write_headers() - - return writer - - async def _prepare_headers(self) -> None: - request = self._req - assert request is not None - writer = self._payload_writer - assert writer is not None - keep_alive = self._keep_alive - if keep_alive is None: - keep_alive = request.keep_alive - self._keep_alive = keep_alive - - version = request.version - - headers = self._headers - for cookie in self._cookies.values(): - value = cookie.output(header="")[1:] - headers.add(hdrs.SET_COOKIE, value) - - if self._compression: - await self._start_compression(request) - - if self._chunked: - if version != HttpVersion11: - raise RuntimeError( - "Using chunked encoding is forbidden " - "for HTTP/{0.major}.{0.minor}".format(request.version) - ) - if not self._must_be_empty_body: - writer.enable_chunking() - headers[hdrs.TRANSFER_ENCODING] = "chunked" - if hdrs.CONTENT_LENGTH in headers: - del headers[hdrs.CONTENT_LENGTH] - elif self._length_check: - writer.length = self.content_length - if writer.length is None: - if version >= HttpVersion11: - if not self._must_be_empty_body: - writer.enable_chunking() - headers[hdrs.TRANSFER_ENCODING] = "chunked" - elif not self._must_be_empty_body: - keep_alive = False - - # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 - # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 - if self._must_be_empty_body: - if hdrs.CONTENT_LENGTH in headers and should_remove_content_length( - request.method, self.status - ): - del headers[hdrs.CONTENT_LENGTH] - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10 - # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 - if hdrs.TRANSFER_ENCODING in headers: - del headers[hdrs.TRANSFER_ENCODING] - else: - headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") - headers.setdefault(hdrs.DATE, 
rfc822_formatted_time()) - headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) - - # connection header - if hdrs.CONNECTION not in headers: - if keep_alive: - if version == HttpVersion10: - headers[hdrs.CONNECTION] = "keep-alive" - else: - if version == HttpVersion11: - headers[hdrs.CONNECTION] = "close" - - async def _write_headers(self) -> None: - request = self._req - assert request is not None - writer = self._payload_writer - assert writer is not None - # status line - version = request.version - status_line = "HTTP/{}.{} {} {}".format( - version[0], version[1], self._status, self._reason - ) - await writer.write_headers(status_line, self._headers) - - async def write(self, data: bytes) -> None: - assert isinstance( - data, (bytes, bytearray, memoryview) - ), "data argument must be byte-ish (%r)" % type(data) - - if self._eof_sent: - raise RuntimeError("Cannot call write() after write_eof()") - if self._payload_writer is None: - raise RuntimeError("Cannot call write() before prepare()") - - await self._payload_writer.write(data) - - async def drain(self) -> None: - assert not self._eof_sent, "EOF has already been sent" - assert self._payload_writer is not None, "Response has not been started" - warnings.warn( - "drain method is deprecated, use await resp.write()", - DeprecationWarning, - stacklevel=2, - ) - await self._payload_writer.drain() - - async def write_eof(self, data: bytes = b"") -> None: - assert isinstance( - data, (bytes, bytearray, memoryview) - ), "data argument must be byte-ish (%r)" % type(data) - - if self._eof_sent: - return - - assert self._payload_writer is not None, "Response has not been started" - - await self._payload_writer.write_eof(data) - self._eof_sent = True - self._req = None - self._body_length = self._payload_writer.output_size - self._payload_writer = None - - def __repr__(self) -> str: - if self._eof_sent: - info = "eof" - elif self.prepared: - assert self._req is not None - info = f"{self._req.method} {self._req.path} " - else: - info = "not prepared" - return f"<{self.__class__.__name__} {self.reason} {info}>" - - def __getitem__(self, key: str) -> Any: - return self._state[key] - - def __setitem__(self, key: str, value: Any) -> None: - self._state[key] = value - - def __delitem__(self, key: str) -> None: - del self._state[key] - - def __len__(self) -> int: - return len(self._state) - - def __iter__(self) -> Iterator[str]: - return iter(self._state) - - def __hash__(self) -> int: - return hash(id(self)) - - def __eq__(self, other: object) -> bool: - return self is other - - -class Response(StreamResponse): - def __init__( - self, - *, - body: Any = None, - status: int = 200, - reason: Optional[str] = None, - text: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: Optional[str] = None, - charset: Optional[str] = None, - zlib_executor_size: Optional[int] = None, - zlib_executor: Optional[Executor] = None, - ) -> None: - if body is not None and text is not None: - raise ValueError("body and text are not allowed together") - - if headers is None: - real_headers: CIMultiDict[str] = CIMultiDict() - elif not isinstance(headers, CIMultiDict): - real_headers = CIMultiDict(headers) - else: - real_headers = headers # = cast('CIMultiDict[str]', headers) - - if content_type is not None and "charset" in content_type: - raise ValueError("charset must not be in content_type " "argument") - - if text is not None: - if hdrs.CONTENT_TYPE in real_headers: - if content_type or charset: - raise ValueError( - "passing both Content-Type header 
and " - "content_type or charset params " - "is forbidden" - ) - else: - # fast path for filling headers - if not isinstance(text, str): - raise TypeError("text argument must be str (%r)" % type(text)) - if content_type is None: - content_type = "text/plain" - if charset is None: - charset = "utf-8" - real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset - body = text.encode(charset) - text = None - else: - if hdrs.CONTENT_TYPE in real_headers: - if content_type is not None or charset is not None: - raise ValueError( - "passing both Content-Type header and " - "content_type or charset params " - "is forbidden" - ) - else: - if content_type is not None: - if charset is not None: - content_type += "; charset=" + charset - real_headers[hdrs.CONTENT_TYPE] = content_type - - super().__init__(status=status, reason=reason, headers=real_headers) - - if text is not None: - self.text = text - else: - self.body = body - - self._compressed_body: Optional[bytes] = None - self._zlib_executor_size = zlib_executor_size - self._zlib_executor = zlib_executor - - @property - def body(self) -> Optional[Union[bytes, Payload]]: - return self._body - - @body.setter - def body(self, body: bytes) -> None: - if body is None: - self._body: Optional[bytes] = None - self._body_payload: bool = False - elif isinstance(body, (bytes, bytearray)): - self._body = body - self._body_payload = False - else: - try: - self._body = body = payload.PAYLOAD_REGISTRY.get(body) - except payload.LookupError: - raise ValueError("Unsupported body type %r" % type(body)) - - self._body_payload = True - - headers = self._headers - - # set content-type - if hdrs.CONTENT_TYPE not in headers: - headers[hdrs.CONTENT_TYPE] = body.content_type - - # copy payload headers - if body.headers: - for (key, value) in body.headers.items(): - if key not in headers: - headers[key] = value - - self._compressed_body = None - - @property - def text(self) -> Optional[str]: - if self._body is None: - return None - return self._body.decode(self.charset or "utf-8") - - @text.setter - def text(self, text: str) -> None: - assert text is None or isinstance( - text, str - ), "text argument must be str (%r)" % type(text) - - if self.content_type == "application/octet-stream": - self.content_type = "text/plain" - if self.charset is None: - self.charset = "utf-8" - - self._body = text.encode(self.charset) - self._body_payload = False - self._compressed_body = None - - @property - def content_length(self) -> Optional[int]: - if self._chunked: - return None - - if hdrs.CONTENT_LENGTH in self._headers: - return super().content_length - - if self._compressed_body is not None: - # Return length of the compressed body - return len(self._compressed_body) - elif self._body_payload: - # A payload without content length, or a compressed payload - return None - elif self._body is not None: - return len(self._body) - else: - return 0 - - @content_length.setter - def content_length(self, value: Optional[int]) -> None: - raise RuntimeError("Content length is set automatically") - - async def write_eof(self, data: bytes = b"") -> None: - if self._eof_sent: - return - if self._compressed_body is None: - body: Optional[Union[bytes, Payload]] = self._body - else: - body = self._compressed_body - assert not data, f"data arg is not supported, got {data!r}" - assert self._req is not None - assert self._payload_writer is not None - if body is not None: - if self._must_be_empty_body: - await super().write_eof() - elif self._body_payload: - payload = cast(Payload, body) - 
await payload.write(self._payload_writer) - await super().write_eof() - else: - await super().write_eof(cast(bytes, body)) - else: - await super().write_eof() - - async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: - if should_remove_content_length(request.method, self.status): - if hdrs.CONTENT_LENGTH in self._headers: - del self._headers[hdrs.CONTENT_LENGTH] - elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: - if self._body_payload: - size = cast(Payload, self._body).size - if size is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(size) - else: - body_len = len(self._body) if self._body else "0" - # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 - if body_len != "0" or ( - self.status != 304 and request.method.upper() != hdrs.METH_HEAD - ): - self._headers[hdrs.CONTENT_LENGTH] = str(body_len) - - return await super()._start(request) - - async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._body_payload or self._chunked: - return await super()._do_start_compression(coding) - - if coding != ContentCoding.identity: - # Instead of using _payload_writer.enable_compression, - # compress the whole body - compressor = ZLibCompressor( - encoding=str(coding.value), - max_sync_chunk_size=self._zlib_executor_size, - executor=self._zlib_executor, - ) - assert self._body is not None - if self._zlib_executor_size is None and len(self._body) > 1024 * 1024: - warnings.warn( - "Synchronous compression of large response bodies " - f"({len(self._body)} bytes) might block the async event loop. " - "Consider providing a custom value to zlib_executor_size/" - "zlib_executor response properties or disabling compression on it." - ) - self._compressed_body = ( - await compressor.compress(self._body) + compressor.flush() - ) - assert self._compressed_body is not None - - self._headers[hdrs.CONTENT_ENCODING] = coding.value - self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) - - -def json_response( - data: Any = sentinel, - *, - text: Optional[str] = None, - body: Optional[bytes] = None, - status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: str = "application/json", - dumps: JSONEncoder = json.dumps, -) -> Response: - if data is not sentinel: - if text or body: - raise ValueError("only one of data, text, or body should be specified") - else: - text = dumps(data) - return Response( - text=text, - body=body, - status=status, - reason=reason, - headers=headers, - content_type=content_type, - ) diff --git a/.venv/Lib/site-packages/aiohttp/web_routedef.py b/.venv/Lib/site-packages/aiohttp/web_routedef.py deleted file mode 100644 index d79cd32..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_routedef.py +++ /dev/null @@ -1,216 +0,0 @@ -import abc -import os # noqa -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterator, - List, - Optional, - Sequence, - Type, - Union, - overload, -) - -import attr - -from . 
import hdrs -from .abc import AbstractView -from .typedefs import Handler, PathLike - -if TYPE_CHECKING: - from .web_request import Request - from .web_response import StreamResponse - from .web_urldispatcher import AbstractRoute, UrlDispatcher -else: - Request = StreamResponse = UrlDispatcher = AbstractRoute = None - - -__all__ = ( - "AbstractRouteDef", - "RouteDef", - "StaticDef", - "RouteTableDef", - "head", - "options", - "get", - "post", - "patch", - "put", - "delete", - "route", - "view", - "static", -) - - -class AbstractRouteDef(abc.ABC): - @abc.abstractmethod - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - pass # pragma: no cover - - -_HandlerType = Union[Type[AbstractView], Handler] - - -@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) -class RouteDef(AbstractRouteDef): - method: str - path: str - handler: _HandlerType - kwargs: Dict[str, Any] - - def __repr__(self) -> str: - info = [] - for name, value in sorted(self.kwargs.items()): - info.append(f", {name}={value!r}") - return " {handler.__name__!r}" "{info}>".format( - method=self.method, path=self.path, handler=self.handler, info="".join(info) - ) - - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - if self.method in hdrs.METH_ALL: - reg = getattr(router, "add_" + self.method.lower()) - return [reg(self.path, self.handler, **self.kwargs)] - else: - return [ - router.add_route(self.method, self.path, self.handler, **self.kwargs) - ] - - -@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) -class StaticDef(AbstractRouteDef): - prefix: str - path: PathLike - kwargs: Dict[str, Any] - - def __repr__(self) -> str: - info = [] - for name, value in sorted(self.kwargs.items()): - info.append(f", {name}={value!r}") - return " {path}" "{info}>".format( - prefix=self.prefix, path=self.path, info="".join(info) - ) - - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: - resource = router.add_static(self.prefix, self.path, **self.kwargs) - routes = resource.get_info().get("routes", {}) - return list(routes.values()) - - -def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return RouteDef(method, path, handler, kwargs) - - -def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_HEAD, path, handler, **kwargs) - - -def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_OPTIONS, path, handler, **kwargs) - - -def get( - path: str, - handler: _HandlerType, - *, - name: Optional[str] = None, - allow_head: bool = True, - **kwargs: Any, -) -> RouteDef: - return route( - hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs - ) - - -def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_POST, path, handler, **kwargs) - - -def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_PUT, path, handler, **kwargs) - - -def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_PATCH, path, handler, **kwargs) - - -def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef: - return route(hdrs.METH_DELETE, path, handler, **kwargs) - - -def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef: - return route(hdrs.METH_ANY, path, handler, **kwargs) - - -def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef: - return StaticDef(prefix, path, kwargs) - - -_Deco = 
Callable[[_HandlerType], _HandlerType] - - -class RouteTableDef(Sequence[AbstractRouteDef]): - """Route definition table""" - - def __init__(self) -> None: - self._items: List[AbstractRouteDef] = [] - - def __repr__(self) -> str: - return f"" - - @overload - def __getitem__(self, index: int) -> AbstractRouteDef: - ... - - @overload - def __getitem__(self, index: slice) -> List[AbstractRouteDef]: - ... - - def __getitem__(self, index): # type: ignore[no-untyped-def] - return self._items[index] - - def __iter__(self) -> Iterator[AbstractRouteDef]: - return iter(self._items) - - def __len__(self) -> int: - return len(self._items) - - def __contains__(self, item: object) -> bool: - return item in self._items - - def route(self, method: str, path: str, **kwargs: Any) -> _Deco: - def inner(handler: _HandlerType) -> _HandlerType: - self._items.append(RouteDef(method, path, handler, kwargs)) - return handler - - return inner - - def head(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_HEAD, path, **kwargs) - - def get(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_GET, path, **kwargs) - - def post(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_POST, path, **kwargs) - - def put(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_PUT, path, **kwargs) - - def patch(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_PATCH, path, **kwargs) - - def delete(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_DELETE, path, **kwargs) - - def options(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_OPTIONS, path, **kwargs) - - def view(self, path: str, **kwargs: Any) -> _Deco: - return self.route(hdrs.METH_ANY, path, **kwargs) - - def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None: - self._items.append(StaticDef(prefix, path, kwargs)) diff --git a/.venv/Lib/site-packages/aiohttp/web_runner.py b/.venv/Lib/site-packages/aiohttp/web_runner.py deleted file mode 100644 index 6999b5c..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_runner.py +++ /dev/null @@ -1,406 +0,0 @@ -import asyncio -import signal -import socket -import warnings -from abc import ABC, abstractmethod -from typing import Any, Awaitable, Callable, List, Optional, Set - -from yarl import URL - -from .typedefs import PathLike -from .web_app import Application -from .web_server import Server - -try: - from ssl import SSLContext -except ImportError: - SSLContext = object # type: ignore[misc,assignment] - - -__all__ = ( - "BaseSite", - "TCPSite", - "UnixSite", - "NamedPipeSite", - "SockSite", - "BaseRunner", - "AppRunner", - "ServerRunner", - "GracefulExit", -) - - -class GracefulExit(SystemExit): - code = 1 - - -def _raise_graceful_exit() -> None: - raise GracefulExit() - - -class BaseSite(ABC): - __slots__ = ("_runner", "_ssl_context", "_backlog", "_server") - - def __init__( - self, - runner: "BaseRunner", - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - if runner.server is None: - raise RuntimeError("Call runner.setup() before making a site") - if shutdown_timeout != 60.0: - msg = "shutdown_timeout should be set on BaseRunner" - warnings.warn(msg, DeprecationWarning, stacklevel=2) - runner._shutdown_timeout = shutdown_timeout - self._runner = runner - self._ssl_context = ssl_context - self._backlog = backlog - self._server: Optional[asyncio.AbstractServer] = None - - @property - @abstractmethod - def 
name(self) -> str: - pass # pragma: no cover - - @abstractmethod - async def start(self) -> None: - self._runner._reg_site(self) - - async def stop(self) -> None: - self._runner._check_site(self) - if self._server is not None: # Maybe not started yet - self._server.close() - - self._runner._unreg_site(self) - - -class TCPSite(BaseSite): - __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port") - - def __init__( - self, - runner: "BaseRunner", - host: Optional[str] = None, - port: Optional[int] = None, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._host = host - if port is None: - port = 8443 if self._ssl_context else 8080 - self._port = port - self._reuse_address = reuse_address - self._reuse_port = reuse_port - - @property - def name(self) -> str: - scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if self._host is None else self._host - return str(URL.build(scheme=scheme, host=host, port=self._port)) - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_server( - server, - self._host, - self._port, - ssl=self._ssl_context, - backlog=self._backlog, - reuse_address=self._reuse_address, - reuse_port=self._reuse_port, - ) - - -class UnixSite(BaseSite): - __slots__ = ("_path",) - - def __init__( - self, - runner: "BaseRunner", - path: PathLike, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._path = path - - @property - def name(self) -> str: - scheme = "https" if self._ssl_context else "http" - return f"{scheme}://unix:{self._path}:" - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_unix_server( - server, - self._path, - ssl=self._ssl_context, - backlog=self._backlog, - ) - - -class NamedPipeSite(BaseSite): - __slots__ = ("_path",) - - def __init__( - self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0 - ) -> None: - loop = asyncio.get_event_loop() - if not isinstance( - loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] - ): - raise RuntimeError( - "Named Pipes only available in proactor" "loop under windows" - ) - super().__init__(runner, shutdown_timeout=shutdown_timeout) - self._path = path - - @property - def name(self) -> str: - return self._path - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - _server = await loop.start_serving_pipe( # type: ignore[attr-defined] - server, self._path - ) - self._server = _server[0] - - -class SockSite(BaseSite): - __slots__ = ("_sock", "_name") - - def __init__( - self, - runner: "BaseRunner", - sock: socket.socket, - *, - shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, - backlog: int = 128, - ) -> None: - super().__init__( - runner, - shutdown_timeout=shutdown_timeout, - ssl_context=ssl_context, - backlog=backlog, - ) - self._sock = sock - 
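# Illustrative sketch, not part of the deleted aiohttp sources: the BaseSite/TCPSite
# and runner classes in this hunk are the embedding API that web.run_app() drives.
# A minimal, hedged usage example using only public aiohttp names:
import asyncio

from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


async def serve() -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    runner = web.AppRunner(app)            # AppRunner appears later in this hunk
    await runner.setup()                   # builds the low-level Server
    site = web.TCPSite(runner, "127.0.0.1", 8080)
    await site.start()                     # binds the socket via loop.create_server()
    try:
        await asyncio.sleep(3600)          # serve until cancelled
    finally:
        await runner.cleanup()             # stops all sites, then shuts the server down

# For the low-level variant in web_server.py further down, wrap a bare handler in
# web.Server and use web.ServerRunner instead of AppRunner.
# asyncio.run(serve())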
scheme = "https" if self._ssl_context else "http" - if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX: - name = f"{scheme}://unix:{sock.getsockname()}:" - else: - host, port = sock.getsockname()[:2] - name = str(URL.build(scheme=scheme, host=host, port=port)) - self._name = name - - @property - def name(self) -> str: - return self._name - - async def start(self) -> None: - await super().start() - loop = asyncio.get_event_loop() - server = self._runner.server - assert server is not None - self._server = await loop.create_server( - server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog - ) - - -class BaseRunner(ABC): - __slots__ = ( - "shutdown_callback", - "_handle_signals", - "_kwargs", - "_server", - "_sites", - "_shutdown_timeout", - ) - - def __init__( - self, - *, - handle_signals: bool = False, - shutdown_timeout: float = 60.0, - **kwargs: Any, - ) -> None: - self.shutdown_callback: Optional[Callable[[], Awaitable[None]]] = None - self._handle_signals = handle_signals - self._kwargs = kwargs - self._server: Optional[Server] = None - self._sites: List[BaseSite] = [] - self._shutdown_timeout = shutdown_timeout - - @property - def server(self) -> Optional[Server]: - return self._server - - @property - def addresses(self) -> List[Any]: - ret: List[Any] = [] - for site in self._sites: - server = site._server - if server is not None: - sockets = server.sockets # type: ignore[attr-defined] - if sockets is not None: - for sock in sockets: - ret.append(sock.getsockname()) - return ret - - @property - def sites(self) -> Set[BaseSite]: - return set(self._sites) - - async def setup(self) -> None: - loop = asyncio.get_event_loop() - - if self._handle_signals: - try: - loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit) - loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit) - except NotImplementedError: # pragma: no cover - # add_signal_handler is not implemented on Windows - pass - - self._server = await self._make_server() - - @abstractmethod - async def shutdown(self) -> None: - """Call any shutdown hooks to help server close gracefully.""" - - async def cleanup(self) -> None: - # The loop over sites is intentional, an exception on gather() - # leaves self._sites in unpredictable state. 
- # The loop guaranties that a site is either deleted on success or - # still present on failure - for site in list(self._sites): - await site.stop() - - if self._server: # If setup succeeded - self._server.pre_shutdown() - await self.shutdown() - - if self.shutdown_callback: - await self.shutdown_callback() - - await self._server.shutdown(self._shutdown_timeout) - await self._cleanup_server() - - self._server = None - if self._handle_signals: - loop = asyncio.get_running_loop() - try: - loop.remove_signal_handler(signal.SIGINT) - loop.remove_signal_handler(signal.SIGTERM) - except NotImplementedError: # pragma: no cover - # remove_signal_handler is not implemented on Windows - pass - - @abstractmethod - async def _make_server(self) -> Server: - pass # pragma: no cover - - @abstractmethod - async def _cleanup_server(self) -> None: - pass # pragma: no cover - - def _reg_site(self, site: BaseSite) -> None: - if site in self._sites: - raise RuntimeError(f"Site {site} is already registered in runner {self}") - self._sites.append(site) - - def _check_site(self, site: BaseSite) -> None: - if site not in self._sites: - raise RuntimeError(f"Site {site} is not registered in runner {self}") - - def _unreg_site(self, site: BaseSite) -> None: - if site not in self._sites: - raise RuntimeError(f"Site {site} is not registered in runner {self}") - self._sites.remove(site) - - -class ServerRunner(BaseRunner): - """Low-level web server runner""" - - __slots__ = ("_web_server",) - - def __init__( - self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any - ) -> None: - super().__init__(handle_signals=handle_signals, **kwargs) - self._web_server = web_server - - async def shutdown(self) -> None: - pass - - async def _make_server(self) -> Server: - return self._web_server - - async def _cleanup_server(self) -> None: - pass - - -class AppRunner(BaseRunner): - """Web Application runner""" - - __slots__ = ("_app",) - - def __init__( - self, app: Application, *, handle_signals: bool = False, **kwargs: Any - ) -> None: - super().__init__(handle_signals=handle_signals, **kwargs) - if not isinstance(app, Application): - raise TypeError( - "The first argument should be web.Application " - "instance, got {!r}".format(app) - ) - self._app = app - - @property - def app(self) -> Application: - return self._app - - async def shutdown(self) -> None: - await self._app.shutdown() - - async def _make_server(self) -> Server: - loop = asyncio.get_event_loop() - self._app._set_loop(loop) - self._app.on_startup.freeze() - await self._app.startup() - self._app.freeze() - - return self._app._make_handler(loop=loop, **self._kwargs) - - async def _cleanup_server(self) -> None: - await self._app.cleanup() diff --git a/.venv/Lib/site-packages/aiohttp/web_server.py b/.venv/Lib/site-packages/aiohttp/web_server.py deleted file mode 100644 index 52faacb..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_server.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Low level HTTP server.""" -import asyncio -from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa - -from .abc import AbstractStreamWriter -from .helpers import get_running_loop -from .http_parser import RawRequestMessage -from .streams import StreamReader -from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler -from .web_request import BaseRequest - -__all__ = ("Server",) - - -class Server: - def __init__( - self, - handler: _RequestHandler, - *, - request_factory: Optional[_RequestFactory] = None, - handler_cancellation: bool = False, - 
loop: Optional[asyncio.AbstractEventLoop] = None, - **kwargs: Any - ) -> None: - self._loop = get_running_loop(loop) - self._connections: Dict[RequestHandler, asyncio.Transport] = {} - self._kwargs = kwargs - self.requests_count = 0 - self.request_handler = handler - self.request_factory = request_factory or self._make_request - self.handler_cancellation = handler_cancellation - - @property - def connections(self) -> List[RequestHandler]: - return list(self._connections.keys()) - - def connection_made( - self, handler: RequestHandler, transport: asyncio.Transport - ) -> None: - self._connections[handler] = transport - - def connection_lost( - self, handler: RequestHandler, exc: Optional[BaseException] = None - ) -> None: - if handler in self._connections: - del self._connections[handler] - - def _make_request( - self, - message: RawRequestMessage, - payload: StreamReader, - protocol: RequestHandler, - writer: AbstractStreamWriter, - task: "asyncio.Task[None]", - ) -> BaseRequest: - return BaseRequest(message, payload, protocol, writer, task, self._loop) - - def pre_shutdown(self) -> None: - for conn in self._connections: - conn.close() - - async def shutdown(self, timeout: Optional[float] = None) -> None: - coros = (conn.shutdown(timeout) for conn in self._connections) - await asyncio.gather(*coros) - self._connections.clear() - - def __call__(self) -> RequestHandler: - try: - return RequestHandler(self, loop=self._loop, **self._kwargs) - except TypeError: - # Failsafe creation: remove all custom handler_args - kwargs = { - k: v - for k, v in self._kwargs.items() - if k in ["debug", "access_log_class"] - } - return RequestHandler(self, loop=self._loop, **kwargs) diff --git a/.venv/Lib/site-packages/aiohttp/web_urldispatcher.py b/.venv/Lib/site-packages/aiohttp/web_urldispatcher.py deleted file mode 100644 index 9969653..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_urldispatcher.py +++ /dev/null @@ -1,1232 +0,0 @@ -import abc -import asyncio -import base64 -import hashlib -import inspect -import keyword -import os -import re -import warnings -from contextlib import contextmanager -from functools import wraps -from pathlib import Path -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Container, - Dict, - Final, - Generator, - Iterable, - Iterator, - List, - Mapping, - NoReturn, - Optional, - Pattern, - Set, - Sized, - Tuple, - Type, - TypedDict, - Union, - cast, -) - -from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined] - -from . 
import hdrs -from .abc import AbstractMatchInfo, AbstractRouter, AbstractView -from .helpers import DEBUG -from .http import HttpVersion11 -from .typedefs import Handler, PathLike -from .web_exceptions import ( - HTTPException, - HTTPExpectationFailed, - HTTPForbidden, - HTTPMethodNotAllowed, - HTTPNotFound, -) -from .web_fileresponse import FileResponse -from .web_request import Request -from .web_response import Response, StreamResponse -from .web_routedef import AbstractRouteDef - -__all__ = ( - "UrlDispatcher", - "UrlMappingMatchInfo", - "AbstractResource", - "Resource", - "PlainResource", - "DynamicResource", - "AbstractRoute", - "ResourceRoute", - "StaticResource", - "View", -) - - -if TYPE_CHECKING: - from .web_app import Application - - BaseDict = Dict[str, str] -else: - BaseDict = dict - -YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) - -HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( - r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" -) -ROUTE_RE: Final[Pattern[str]] = re.compile( - r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" -) -PATH_SEP: Final[str] = re.escape("/") - - -_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] -_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] - - -class _InfoDict(TypedDict, total=False): - path: str - - formatter: str - pattern: Pattern[str] - - directory: Path - prefix: str - routes: Mapping[str, "AbstractRoute"] - - app: "Application" - - domain: str - - rule: "AbstractRuleMatching" - - http_exception: HTTPException - - -class AbstractResource(Sized, Iterable["AbstractRoute"]): - def __init__(self, *, name: Optional[str] = None) -> None: - self._name = name - - @property - def name(self) -> Optional[str]: - return self._name - - @property - @abc.abstractmethod - def canonical(self) -> str: - """Exposes the resource's canonical path. - - For example '/foo/bar/{name}' - - """ - - @abc.abstractmethod # pragma: no branch - def url_for(self, **kwargs: str) -> URL: - """Construct url for resource with additional params.""" - - @abc.abstractmethod # pragma: no branch - async def resolve(self, request: Request) -> _Resolve: - """Resolve resource. - - Return (UrlMappingMatchInfo, allowed_methods) pair. - """ - - @abc.abstractmethod - def add_prefix(self, prefix: str) -> None: - """Add a prefix to processed URLs. - - Required for subapplications support. 
- """ - - @abc.abstractmethod - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - def freeze(self) -> None: - pass - - @abc.abstractmethod - def raw_match(self, path: str) -> bool: - """Perform a raw match against path""" - - -class AbstractRoute(abc.ABC): - def __init__( - self, - method: str, - handler: Union[Handler, Type[AbstractView]], - *, - expect_handler: Optional[_ExpectHandler] = None, - resource: Optional[AbstractResource] = None, - ) -> None: - - if expect_handler is None: - expect_handler = _default_expect_handler - - assert asyncio.iscoroutinefunction( - expect_handler - ), f"Coroutine is expected, got {expect_handler!r}" - - method = method.upper() - if not HTTP_METHOD_RE.match(method): - raise ValueError(f"{method} is not allowed HTTP method") - - assert callable(handler), handler - if asyncio.iscoroutinefunction(handler): - pass - elif inspect.isgeneratorfunction(handler): - warnings.warn( - "Bare generators are deprecated, " "use @coroutine wrapper", - DeprecationWarning, - ) - elif isinstance(handler, type) and issubclass(handler, AbstractView): - pass - else: - warnings.warn( - "Bare functions are deprecated, " "use async ones", DeprecationWarning - ) - - @wraps(handler) - async def handler_wrapper(request: Request) -> StreamResponse: - result = old_handler(request) - if asyncio.iscoroutine(result): - result = await result - assert isinstance(result, StreamResponse) - return result - - old_handler = handler - handler = handler_wrapper - - self._method = method - self._handler = handler - self._expect_handler = expect_handler - self._resource = resource - - @property - def method(self) -> str: - return self._method - - @property - def handler(self) -> Handler: - return self._handler - - @property - @abc.abstractmethod - def name(self) -> Optional[str]: - """Optional route's name, always equals to resource's name.""" - - @property - def resource(self) -> Optional[AbstractResource]: - return self._resource - - @abc.abstractmethod - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - @abc.abstractmethod # pragma: no branch - def url_for(self, *args: str, **kwargs: str) -> URL: - """Construct url for route with additional params.""" - - async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]: - return await self._expect_handler(request) - - -class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): - def __init__(self, match_dict: Dict[str, str], route: AbstractRoute): - super().__init__(match_dict) - self._route = route - self._apps: List[Application] = [] - self._current_app: Optional[Application] = None - self._frozen = False - - @property - def handler(self) -> Handler: - return self._route.handler - - @property - def route(self) -> AbstractRoute: - return self._route - - @property - def expect_handler(self) -> _ExpectHandler: - return self._route.handle_expect_header - - @property - def http_exception(self) -> Optional[HTTPException]: - return None - - def get_info(self) -> _InfoDict: # type: ignore[override] - return self._route.get_info() - - @property - def apps(self) -> Tuple["Application", ...]: - return tuple(self._apps) - - def add_app(self, app: "Application") -> None: - if self._frozen: - raise RuntimeError("Cannot change apps stack after .freeze() call") - if self._current_app is None: - self._current_app = app - self._apps.insert(0, app) - - @property - def current_app(self) -> "Application": - app = self._current_app - assert 
app is not None - return app - - @contextmanager - def set_current_app(self, app: "Application") -> Generator[None, None, None]: - if DEBUG: # pragma: no cover - if app not in self._apps: - raise RuntimeError( - "Expected one of the following apps {!r}, got {!r}".format( - self._apps, app - ) - ) - prev = self._current_app - self._current_app = app - try: - yield - finally: - self._current_app = prev - - def freeze(self) -> None: - self._frozen = True - - def __repr__(self) -> str: - return f"" - - -class MatchInfoError(UrlMappingMatchInfo): - def __init__(self, http_exception: HTTPException) -> None: - self._exception = http_exception - super().__init__({}, SystemRoute(self._exception)) - - @property - def http_exception(self) -> HTTPException: - return self._exception - - def __repr__(self) -> str: - return "".format( - self._exception.status, self._exception.reason - ) - - -async def _default_expect_handler(request: Request) -> None: - """Default handler for Expect header. - - Just send "100 Continue" to client. - raise HTTPExpectationFailed if value of header is not "100-continue" - """ - expect = request.headers.get(hdrs.EXPECT, "") - if request.version == HttpVersion11: - if expect.lower() == "100-continue": - await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") - else: - raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) - - -class Resource(AbstractResource): - def __init__(self, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - self._routes: List[ResourceRoute] = [] - - def add_route( - self, - method: str, - handler: Union[Type[AbstractView], Handler], - *, - expect_handler: Optional[_ExpectHandler] = None, - ) -> "ResourceRoute": - - for route_obj in self._routes: - if route_obj.method == method or route_obj.method == hdrs.METH_ANY: - raise RuntimeError( - "Added route will never be executed, " - "method {route.method} is already " - "registered".format(route=route_obj) - ) - - route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) - self.register_route(route_obj) - return route_obj - - def register_route(self, route: "ResourceRoute") -> None: - assert isinstance( - route, ResourceRoute - ), f"Instance of Route class is required, got {route!r}" - self._routes.append(route) - - async def resolve(self, request: Request) -> _Resolve: - allowed_methods: Set[str] = set() - - match_dict = self._match(request.rel_url.raw_path) - if match_dict is None: - return None, allowed_methods - - for route_obj in self._routes: - route_method = route_obj.method - allowed_methods.add(route_method) - - if route_method == request.method or route_method == hdrs.METH_ANY: - return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) - else: - return None, allowed_methods - - @abc.abstractmethod - def _match(self, path: str) -> Optional[Dict[str, str]]: - pass # pragma: no cover - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator["ResourceRoute"]: - return iter(self._routes) - - # TODO: implement all abstract methods - - -class PlainResource(Resource): - def __init__(self, path: str, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - assert not path or path.startswith("/") - self._path = path - - @property - def canonical(self) -> str: - return self._path - - def freeze(self) -> None: - if not self._path: - self._path = "/" - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - 
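# Illustrative sketch, not part of the deleted sources: UrlMappingMatchInfo is what a
# resolved route attaches to the request; handlers read the captured path parameters
# through request.match_info.
from aiohttp import web


async def user_handler(request: web.Request) -> web.Response:
    user_id = request.match_info["id"]     # filled in by the matched resource
    return web.Response(text=f"user {user_id}")


app = web.Application()
app.router.add_get("/users/{id}", user_handler)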
self._path = prefix + self._path - - def _match(self, path: str) -> Optional[Dict[str, str]]: - # string comparison is about 10 times faster than regexp matching - if self._path == path: - return {} - else: - return None - - def raw_match(self, path: str) -> bool: - return self._path == path - - def get_info(self) -> _InfoDict: - return {"path": self._path} - - def url_for(self) -> URL: # type: ignore[override] - return URL.build(path=self._path, encoded=True) - - def __repr__(self) -> str: - name = "'" + self.name + "' " if self.name is not None else "" - return f"" - - -class DynamicResource(Resource): - - DYN = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*)\}") - DYN_WITH_RE = re.compile(r"\{(?P[_a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}") - GOOD = r"[^{}/]+" - - def __init__(self, path: str, *, name: Optional[str] = None) -> None: - super().__init__(name=name) - pattern = "" - formatter = "" - for part in ROUTE_RE.split(path): - match = self.DYN.fullmatch(part) - if match: - pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD) - formatter += "{" + match.group("var") + "}" - continue - - match = self.DYN_WITH_RE.fullmatch(part) - if match: - pattern += "(?P<{var}>{re})".format(**match.groupdict()) - formatter += "{" + match.group("var") + "}" - continue - - if "{" in part or "}" in part: - raise ValueError(f"Invalid path '{path}'['{part}']") - - part = _requote_path(part) - formatter += part - pattern += re.escape(part) - - try: - compiled = re.compile(pattern) - except re.error as exc: - raise ValueError(f"Bad pattern '{pattern}': {exc}") from None - assert compiled.pattern.startswith(PATH_SEP) - assert formatter.startswith("/") - self._pattern = compiled - self._formatter = formatter - - @property - def canonical(self) -> str: - return self._formatter - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern) - self._formatter = prefix + self._formatter - - def _match(self, path: str) -> Optional[Dict[str, str]]: - match = self._pattern.fullmatch(path) - if match is None: - return None - else: - return { - key: _unquote_path(value) for key, value in match.groupdict().items() - } - - def raw_match(self, path: str) -> bool: - return self._formatter == path - - def get_info(self) -> _InfoDict: - return {"formatter": self._formatter, "pattern": self._pattern} - - def url_for(self, **parts: str) -> URL: - url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()}) - return URL.build(path=url, encoded=True) - - def __repr__(self) -> str: - name = "'" + self.name + "' " if self.name is not None else "" - return "".format( - name=name, formatter=self._formatter - ) - - -class PrefixResource(AbstractResource): - def __init__(self, prefix: str, *, name: Optional[str] = None) -> None: - assert not prefix or prefix.startswith("/"), prefix - assert prefix in ("", "/") or not prefix.endswith("/"), prefix - super().__init__(name=name) - self._prefix = _requote_path(prefix) - self._prefix2 = self._prefix + "/" - - @property - def canonical(self) -> str: - return self._prefix - - def add_prefix(self, prefix: str) -> None: - assert prefix.startswith("/") - assert not prefix.endswith("/") - assert len(prefix) > 1 - self._prefix = prefix + self._prefix - self._prefix2 = self._prefix + "/" - - def raw_match(self, prefix: str) -> bool: - return False - - # TODO: impl missing abstract methods - - -class StaticResource(PrefixResource): - 
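# Illustrative sketch, not part of the deleted sources: PlainResource and
# DynamicResource both implement url_for(), reachable through named resources on the
# router; this is how reverse URL construction works in aiohttp.
from aiohttp import web


async def article(request: web.Request) -> web.Response:
    return web.Response(text=request.match_info["slug"])


app = web.Application()
app.router.add_get("/articles/{slug}", article, name="article")
url = app.router["article"].url_for(slug="intro")   # URL('/articles/intro')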
VERSION_KEY = "v" - - def __init__( - self, - prefix: str, - directory: PathLike, - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - chunk_size: int = 256 * 1024, - show_index: bool = False, - follow_symlinks: bool = False, - append_version: bool = False, - ) -> None: - super().__init__(prefix, name=name) - try: - directory = Path(directory) - if str(directory).startswith("~"): - directory = Path(os.path.expanduser(str(directory))) - directory = directory.resolve() - if not directory.is_dir(): - raise ValueError("Not a directory") - except (FileNotFoundError, ValueError) as error: - raise ValueError(f"No directory exists at '{directory}'") from error - self._directory = directory - self._show_index = show_index - self._chunk_size = chunk_size - self._follow_symlinks = follow_symlinks - self._expect_handler = expect_handler - self._append_version = append_version - - self._routes = { - "GET": ResourceRoute( - "GET", self._handle, self, expect_handler=expect_handler - ), - "HEAD": ResourceRoute( - "HEAD", self._handle, self, expect_handler=expect_handler - ), - } - - def url_for( # type: ignore[override] - self, - *, - filename: PathLike, - append_version: Optional[bool] = None, - ) -> URL: - if append_version is None: - append_version = self._append_version - filename = str(filename).lstrip("/") - - url = URL.build(path=self._prefix, encoded=True) - # filename is not encoded - if YARL_VERSION < (1, 6): - url = url / filename.replace("%", "%25") - else: - url = url / filename - - if append_version: - unresolved_path = self._directory.joinpath(filename) - try: - if self._follow_symlinks: - normalized_path = Path(os.path.normpath(unresolved_path)) - normalized_path.relative_to(self._directory) - filepath = normalized_path.resolve() - else: - filepath = unresolved_path.resolve() - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError): - # ValueError for case when path point to symlink - # with follow_symlinks is False - return url # relatively safe - if filepath.is_file(): - # TODO cache file content - # with file watcher for cache invalidation - with filepath.open("rb") as f: - file_bytes = f.read() - h = self._get_file_hash(file_bytes) - url = url.with_query({self.VERSION_KEY: h}) - return url - return url - - @staticmethod - def _get_file_hash(byte_array: bytes) -> str: - m = hashlib.sha256() # todo sha256 can be configurable param - m.update(byte_array) - b64 = base64.urlsafe_b64encode(m.digest()) - return b64.decode("ascii") - - def get_info(self) -> _InfoDict: - return { - "directory": self._directory, - "prefix": self._prefix, - "routes": self._routes, - } - - def set_options_route(self, handler: Handler) -> None: - if "OPTIONS" in self._routes: - raise RuntimeError("OPTIONS route was set already") - self._routes["OPTIONS"] = ResourceRoute( - "OPTIONS", handler, self, expect_handler=self._expect_handler - ) - - async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.raw_path - method = request.method - allowed_methods = set(self._routes) - if not path.startswith(self._prefix2) and path != self._prefix: - return None, set() - - if method not in allowed_methods: - return None, allowed_methods - - match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])} - return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._routes.values()) - - async def 
_handle(self, request: Request) -> StreamResponse: - rel_url = request.match_info["filename"] - try: - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden() - unresolved_path = self._directory.joinpath(filename) - if self._follow_symlinks: - normalized_path = Path(os.path.normpath(unresolved_path)) - normalized_path.relative_to(self._directory) - filepath = normalized_path.resolve() - else: - filepath = unresolved_path.resolve() - filepath.relative_to(self._directory) - except (ValueError, FileNotFoundError) as error: - # relatively safe - raise HTTPNotFound() from error - except HTTPForbidden: - raise - except Exception as error: - # perm error or other kind! - request.app.logger.exception(error) - raise HTTPNotFound() from error - - # on opening a dir, load its contents if allowed - if filepath.is_dir(): - if self._show_index: - try: - return Response( - text=self._directory_as_html(filepath), content_type="text/html" - ) - except PermissionError: - raise HTTPForbidden() - else: - raise HTTPForbidden() - elif filepath.is_file(): - return FileResponse(filepath, chunk_size=self._chunk_size) - else: - raise HTTPNotFound - - def _directory_as_html(self, filepath: Path) -> str: - # returns directory's index as html - - # sanity check - assert filepath.is_dir() - - relative_path_to_dir = filepath.relative_to(self._directory).as_posix() - index_of = f"Index of /{relative_path_to_dir}" - h1 = f"
<h1>{index_of}</h1>" - - index_list = [] - dir_index = filepath.iterdir() - for _file in sorted(dir_index): - # show file url as relative to static path - rel_path = _file.relative_to(self._directory).as_posix() - file_url = self._prefix + "/" + rel_path - - # if file is a directory, add '/' to the end of the name - if _file.is_dir(): - file_name = f"{_file.name}/" - else: - file_name = _file.name - - index_list.append( - '<li><a href="{url}">{name}</a></li>'.format( - url=file_url, name=file_name - ) - ) - ul = "<ul>\n{}\n</ul>
    ".format("\n".join(index_list)) - body = f"\n{h1}\n{ul}\n" - - head_str = f"\n{index_of}\n" - html = f"\n{head_str}\n{body}\n" - - return html - - def __repr__(self) -> str: - name = "'" + self.name + "'" if self.name is not None else "" - return " {directory!r}>".format( - name=name, path=self._prefix, directory=self._directory - ) - - -class PrefixedSubAppResource(PrefixResource): - def __init__(self, prefix: str, app: "Application") -> None: - super().__init__(prefix) - self._app = app - for resource in app.router.resources(): - resource.add_prefix(prefix) - - def add_prefix(self, prefix: str) -> None: - super().add_prefix(prefix) - for resource in self._app.router.resources(): - resource.add_prefix(prefix) - - def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not supported " "by sub-application root") - - def get_info(self) -> _InfoDict: - return {"app": self._app, "prefix": self._prefix} - - async def resolve(self, request: Request) -> _Resolve: - if ( - not request.url.raw_path.startswith(self._prefix2) - and request.url.raw_path != self._prefix - ): - return None, set() - match_info = await self._app.router.resolve(request) - match_info.add_app(self._app) - if isinstance(match_info.http_exception, HTTPMethodNotAllowed): - methods = match_info.http_exception.allowed_methods - else: - methods = set() - return match_info, methods - - def __len__(self) -> int: - return len(self._app.router.routes()) - - def __iter__(self) -> Iterator[AbstractRoute]: - return iter(self._app.router.routes()) - - def __repr__(self) -> str: - return " {app!r}>".format( - prefix=self._prefix, app=self._app - ) - - -class AbstractRuleMatching(abc.ABC): - @abc.abstractmethod # pragma: no branch - async def match(self, request: Request) -> bool: - """Return bool if the request satisfies the criteria""" - - @abc.abstractmethod # pragma: no branch - def get_info(self) -> _InfoDict: - """Return a dict with additional info useful for introspection""" - - @property - @abc.abstractmethod # pragma: no branch - def canonical(self) -> str: - """Return a str""" - - -class Domain(AbstractRuleMatching): - re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(? None: - super().__init__() - self._domain = self.validation(domain) - - @property - def canonical(self) -> str: - return self._domain - - def validation(self, domain: str) -> str: - if not isinstance(domain, str): - raise TypeError("Domain must be str") - domain = domain.rstrip(".").lower() - if not domain: - raise ValueError("Domain cannot be empty") - elif "://" in domain: - raise ValueError("Scheme not supported") - url = URL("http://" + domain) - assert url.raw_host is not None - if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")): - raise ValueError("Domain not valid") - if url.port == 80: - return url.raw_host - return f"{url.raw_host}:{url.port}" - - async def match(self, request: Request) -> bool: - host = request.headers.get(hdrs.HOST) - if not host: - return False - return self.match_domain(host) - - def match_domain(self, host: str) -> bool: - return host.lower() == self._domain - - def get_info(self) -> _InfoDict: - return {"domain": self._domain} - - -class MaskDomain(Domain): - re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(? 
None: - super().__init__(domain) - mask = self._domain.replace(".", r"\.").replace("*", ".*") - self._mask = re.compile(mask) - - @property - def canonical(self) -> str: - return self._mask.pattern - - def match_domain(self, host: str) -> bool: - return self._mask.fullmatch(host) is not None - - -class MatchedSubAppResource(PrefixedSubAppResource): - def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None: - AbstractResource.__init__(self) - self._prefix = "" - self._app = app - self._rule = rule - - @property - def canonical(self) -> str: - return self._rule.canonical - - def get_info(self) -> _InfoDict: - return {"app": self._app, "rule": self._rule} - - async def resolve(self, request: Request) -> _Resolve: - if not await self._rule.match(request): - return None, set() - match_info = await self._app.router.resolve(request) - match_info.add_app(self._app) - if isinstance(match_info.http_exception, HTTPMethodNotAllowed): - methods = match_info.http_exception.allowed_methods - else: - methods = set() - return match_info, methods - - def __repr__(self) -> str: - return " {app!r}>" "".format(app=self._app) - - -class ResourceRoute(AbstractRoute): - """A route with resource""" - - def __init__( - self, - method: str, - handler: Union[Handler, Type[AbstractView]], - resource: AbstractResource, - *, - expect_handler: Optional[_ExpectHandler] = None, - ) -> None: - super().__init__( - method, handler, expect_handler=expect_handler, resource=resource - ) - - def __repr__(self) -> str: - return " {handler!r}".format( - method=self.method, resource=self._resource, handler=self.handler - ) - - @property - def name(self) -> Optional[str]: - if self._resource is None: - return None - return self._resource.name - - def url_for(self, *args: str, **kwargs: str) -> URL: - """Construct url for route with additional params.""" - assert self._resource is not None - return self._resource.url_for(*args, **kwargs) - - def get_info(self) -> _InfoDict: - assert self._resource is not None - return self._resource.get_info() - - -class SystemRoute(AbstractRoute): - def __init__(self, http_exception: HTTPException) -> None: - super().__init__(hdrs.METH_ANY, self._handle) - self._http_exception = http_exception - - def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not allowed for SystemRoute") - - @property - def name(self) -> Optional[str]: - return None - - def get_info(self) -> _InfoDict: - return {"http_exception": self._http_exception} - - async def _handle(self, request: Request) -> StreamResponse: - raise self._http_exception - - @property - def status(self) -> int: - return self._http_exception.status - - @property - def reason(self) -> str: - return self._http_exception.reason - - def __repr__(self) -> str: - return "".format(self=self) - - -class View(AbstractView): - async def _iter(self) -> StreamResponse: - if self.request.method not in hdrs.METH_ALL: - self._raise_allowed_methods() - method: Optional[Callable[[], Awaitable[StreamResponse]]] - method = getattr(self, self.request.method.lower(), None) - if method is None: - self._raise_allowed_methods() - ret = await method() - assert isinstance(ret, StreamResponse) - return ret - - def __await__(self) -> Generator[Any, None, StreamResponse]: - return self._iter().__await__() - - def _raise_allowed_methods(self) -> NoReturn: - allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())} - raise HTTPMethodNotAllowed(self.request.method, allowed_methods) - - -class ResourcesView(Sized, 
Iterable[AbstractResource], Container[AbstractResource]): - def __init__(self, resources: List[AbstractResource]) -> None: - self._resources = resources - - def __len__(self) -> int: - return len(self._resources) - - def __iter__(self) -> Iterator[AbstractResource]: - yield from self._resources - - def __contains__(self, resource: object) -> bool: - return resource in self._resources - - -class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): - def __init__(self, resources: List[AbstractResource]): - self._routes: List[AbstractRoute] = [] - for resource in resources: - for route in resource: - self._routes.append(route) - - def __len__(self) -> int: - return len(self._routes) - - def __iter__(self) -> Iterator[AbstractRoute]: - yield from self._routes - - def __contains__(self, route: object) -> bool: - return route in self._routes - - -class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): - - NAME_SPLIT_RE = re.compile(r"[.:-]") - - def __init__(self) -> None: - super().__init__() - self._resources: List[AbstractResource] = [] - self._named_resources: Dict[str, AbstractResource] = {} - - async def resolve(self, request: Request) -> UrlMappingMatchInfo: - method = request.method - allowed_methods: Set[str] = set() - - for resource in self._resources: - match_dict, allowed = await resource.resolve(request) - if match_dict is not None: - return match_dict - else: - allowed_methods |= allowed - - if allowed_methods: - return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) - else: - return MatchInfoError(HTTPNotFound()) - - def __iter__(self) -> Iterator[str]: - return iter(self._named_resources) - - def __len__(self) -> int: - return len(self._named_resources) - - def __contains__(self, resource: object) -> bool: - return resource in self._named_resources - - def __getitem__(self, name: str) -> AbstractResource: - return self._named_resources[name] - - def resources(self) -> ResourcesView: - return ResourcesView(self._resources) - - def routes(self) -> RoutesView: - return RoutesView(self._resources) - - def named_resources(self) -> Mapping[str, AbstractResource]: - return MappingProxyType(self._named_resources) - - def register_resource(self, resource: AbstractResource) -> None: - assert isinstance( - resource, AbstractResource - ), f"Instance of AbstractResource class is required, got {resource!r}" - if self.frozen: - raise RuntimeError("Cannot register a resource into frozen router.") - - name = resource.name - - if name is not None: - parts = self.NAME_SPLIT_RE.split(name) - for part in parts: - if keyword.iskeyword(part): - raise ValueError( - f"Incorrect route name {name!r}, " - "python keywords cannot be used " - "for route name" - ) - if not part.isidentifier(): - raise ValueError( - "Incorrect route name {!r}, " - "the name should be a sequence of " - "python identifiers separated " - "by dash, dot or column".format(name) - ) - if name in self._named_resources: - raise ValueError( - "Duplicate {!r}, " - "already handled by {!r}".format(name, self._named_resources[name]) - ) - self._named_resources[name] = resource - self._resources.append(resource) - - def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: - if path and not path.startswith("/"): - raise ValueError("path should be started with / or be empty") - # Reuse last added resource if path and name are the same - if self._resources: - resource = self._resources[-1] - if resource.name == name and resource.raw_match(path): - return cast(Resource, resource) - 
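# Illustrative sketch, not part of the deleted sources: UrlDispatcher is the router
# behind web.Application; resources usually reach register_resource() via
# add_routes()/add_static() or the RouteTableDef helpers from web_routedef.py earlier
# in this diff.
from aiohttp import web

routes = web.RouteTableDef()


@routes.get("/ping")
async def ping(request: web.Request) -> web.Response:
    return web.Response(text="pong")


app = web.Application()
app.add_routes(routes)               # each RouteDef.register() runs against app.router
app.router.add_static("/static", "./public", show_index=True)   # StaticResource above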
if not ("{" in path or "}" in path or ROUTE_RE.search(path)): - resource = PlainResource(_requote_path(path), name=name) - self.register_resource(resource) - return resource - resource = DynamicResource(path, name=name) - self.register_resource(resource) - return resource - - def add_route( - self, - method: str, - path: str, - handler: Union[Handler, Type[AbstractView]], - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - ) -> AbstractRoute: - resource = self.add_resource(path, name=name) - return resource.add_route(method, handler, expect_handler=expect_handler) - - def add_static( - self, - prefix: str, - path: PathLike, - *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, - chunk_size: int = 256 * 1024, - show_index: bool = False, - follow_symlinks: bool = False, - append_version: bool = False, - ) -> AbstractResource: - """Add static files view. - - prefix - url prefix - path - folder with files - - """ - assert prefix.startswith("/") - if prefix.endswith("/"): - prefix = prefix[:-1] - resource = StaticResource( - prefix, - path, - name=name, - expect_handler=expect_handler, - chunk_size=chunk_size, - show_index=show_index, - follow_symlinks=follow_symlinks, - append_version=append_version, - ) - self.register_resource(resource) - return resource - - def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method HEAD.""" - return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) - - def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method OPTIONS.""" - return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) - - def add_get( - self, - path: str, - handler: Handler, - *, - name: Optional[str] = None, - allow_head: bool = True, - **kwargs: Any, - ) -> AbstractRoute: - """Shortcut for add_route with method GET. - - If allow_head is true, another - route is added allowing head requests to the same endpoint. - """ - resource = self.add_resource(path, name=name) - if allow_head: - resource.add_route(hdrs.METH_HEAD, handler, **kwargs) - return resource.add_route(hdrs.METH_GET, handler, **kwargs) - - def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method POST.""" - return self.add_route(hdrs.METH_POST, path, handler, **kwargs) - - def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method PUT.""" - return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) - - def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method PATCH.""" - return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) - - def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: - """Shortcut for add_route with method DELETE.""" - return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) - - def add_view( - self, path: str, handler: Type[AbstractView], **kwargs: Any - ) -> AbstractRoute: - """Shortcut for add_route with ANY methods for a class-based view.""" - return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) - - def freeze(self) -> None: - super().freeze() - for resource in self._resources: - resource.freeze() - - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: - """Append routes to route table. 
- - Parameter should be a sequence of RouteDef objects. - - Returns a list of registered AbstractRoute instances. - """ - registered_routes = [] - for route_def in routes: - registered_routes.extend(route_def.register(self)) - return registered_routes - - -def _quote_path(value: str) -> str: - if YARL_VERSION < (1, 6): - value = value.replace("%", "%25") - return URL.build(path=value, encoded=False).raw_path - - -def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path - - -def _requote_path(value: str) -> str: - # Quote non-ascii characters and other characters which must be quoted, - # but preserve existing %-sequences. - result = _quote_path(value) - if "%" in value: - result = result.replace("%25", "%") - return result diff --git a/.venv/Lib/site-packages/aiohttp/web_ws.py b/.venv/Lib/site-packages/aiohttp/web_ws.py deleted file mode 100644 index 7833777..0000000 --- a/.venv/Lib/site-packages/aiohttp/web_ws.py +++ /dev/null @@ -1,529 +0,0 @@ -import asyncio -import base64 -import binascii -import hashlib -import json -import sys -from typing import Any, Final, Iterable, Optional, Tuple, cast - -import attr -from multidict import CIMultiDict - -from . import hdrs -from .abc import AbstractStreamWriter -from .helpers import call_later, set_result -from .http import ( - WS_CLOSED_MESSAGE, - WS_CLOSING_MESSAGE, - WS_KEY, - WebSocketError, - WebSocketReader, - WebSocketWriter, - WSCloseCode, - WSMessage, - WSMsgType as WSMsgType, - ws_ext_gen, - ws_ext_parse, -) -from .log import ws_logger -from .streams import EofStream, FlowControlDataQueue -from .typedefs import JSONDecoder, JSONEncoder -from .web_exceptions import HTTPBadRequest, HTTPException -from .web_request import BaseRequest -from .web_response import StreamResponse - -if sys.version_info >= (3, 11): - import asyncio as async_timeout -else: - import async_timeout - -__all__ = ( - "WebSocketResponse", - "WebSocketReady", - "WSMsgType", -) - -THRESHOLD_CONNLOST_ACCESS: Final[int] = 5 - - -@attr.s(auto_attribs=True, frozen=True, slots=True) -class WebSocketReady: - ok: bool - protocol: Optional[str] - - def __bool__(self) -> bool: - return self.ok - - -class WebSocketResponse(StreamResponse): - - _length_check = False - - def __init__( - self, - *, - timeout: float = 10.0, - receive_timeout: Optional[float] = None, - autoclose: bool = True, - autoping: bool = True, - heartbeat: Optional[float] = None, - protocols: Iterable[str] = (), - compress: bool = True, - max_msg_size: int = 4 * 1024 * 1024, - ) -> None: - super().__init__(status=101) - self._protocols = protocols - self._ws_protocol: Optional[str] = None - self._writer: Optional[WebSocketWriter] = None - self._reader: Optional[FlowControlDataQueue[WSMessage]] = None - self._closed = False - self._closing = False - self._conn_lost = 0 - self._close_code: Optional[int] = None - self._loop: Optional[asyncio.AbstractEventLoop] = None - self._waiting: Optional[asyncio.Future[bool]] = None - self._exception: Optional[BaseException] = None - self._timeout = timeout - self._receive_timeout = receive_timeout - self._autoclose = autoclose - self._autoping = autoping - self._heartbeat = heartbeat - self._heartbeat_cb: Optional[asyncio.TimerHandle] = None - if heartbeat is not None: - self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb: Optional[asyncio.TimerHandle] = None - self._compress = compress - self._max_msg_size = max_msg_size - - def _cancel_heartbeat(self) -> None: - if self._pong_response_cb is not None: - 
self._pong_response_cb.cancel() - self._pong_response_cb = None - - if self._heartbeat_cb is not None: - self._heartbeat_cb.cancel() - self._heartbeat_cb = None - - def _reset_heartbeat(self) -> None: - self._cancel_heartbeat() - - if self._heartbeat is not None: - assert self._loop is not None - self._heartbeat_cb = call_later( - self._send_heartbeat, - self._heartbeat, - self._loop, - timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5, - ) - - def _send_heartbeat(self) -> None: - if self._heartbeat is not None and not self._closed: - assert self._loop is not None - # fire-and-forget a task is not perfect but maybe ok for - # sending ping. Otherwise we need a long-living heartbeat - # task in the class. - self._loop.create_task(self._writer.ping()) # type: ignore[union-attr] - - if self._pong_response_cb is not None: - self._pong_response_cb.cancel() - self._pong_response_cb = call_later( - self._pong_not_received, - self._pong_heartbeat, - self._loop, - timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold - if self._req is not None - else 5, - ) - - def _pong_not_received(self) -> None: - if self._req is not None and self._req.transport is not None: - self._closed = True - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - self._exception = asyncio.TimeoutError() - - async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: - # make pre-check to don't hide it by do_handshake() exceptions - if self._payload_writer is not None: - return self._payload_writer - - protocol, writer = self._pre_start(request) - payload_writer = await super().prepare(request) - assert payload_writer is not None - self._post_start(request, protocol, writer) - await payload_writer.drain() - return payload_writer - - def _handshake( - self, request: BaseRequest - ) -> Tuple["CIMultiDict[str]", str, bool, bool]: - headers = request.headers - if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): - raise HTTPBadRequest( - text=( - "No WebSocket UPGRADE hdr: {}\n Can " - '"Upgrade" only to "WebSocket".' 
- ).format(headers.get(hdrs.UPGRADE)) - ) - - if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): - raise HTTPBadRequest( - text="No CONNECTION upgrade hdr: {}".format( - headers.get(hdrs.CONNECTION) - ) - ) - - # find common sub-protocol between client and server - protocol = None - if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: - req_protocols = [ - str(proto.strip()) - for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") - ] - - for proto in req_protocols: - if proto in self._protocols: - protocol = proto - break - else: - # No overlap found: Return no protocol as per spec - ws_logger.warning( - "Client protocols %r don’t overlap server-known ones %r", - req_protocols, - self._protocols, - ) - - # check supported version - version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "") - if version not in ("13", "8", "7"): - raise HTTPBadRequest(text=f"Unsupported version: {version}") - - # check client handshake for validity - key = headers.get(hdrs.SEC_WEBSOCKET_KEY) - try: - if not key or len(base64.b64decode(key)) != 16: - raise HTTPBadRequest(text=f"Handshake error: {key!r}") - except binascii.Error: - raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None - - accept_val = base64.b64encode( - hashlib.sha1(key.encode() + WS_KEY).digest() - ).decode() - response_headers = CIMultiDict( - { - hdrs.UPGRADE: "websocket", - hdrs.CONNECTION: "upgrade", - hdrs.SEC_WEBSOCKET_ACCEPT: accept_val, - } - ) - - notakeover = False - compress = 0 - if self._compress: - extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) - # Server side always get return with no exception. - # If something happened, just drop compress extension - compress, notakeover = ws_ext_parse(extensions, isserver=True) - if compress: - enabledext = ws_ext_gen( - compress=compress, isserver=True, server_notakeover=notakeover - ) - response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext - - if protocol: - response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol - return ( - response_headers, - protocol, - compress, - notakeover, - ) # type: ignore[return-value] - - def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]: - self._loop = request._loop - - headers, protocol, compress, notakeover = self._handshake(request) - - self.set_status(101) - self.headers.update(headers) - self.force_close() - self._compress = compress - transport = request._protocol.transport - assert transport is not None - writer = WebSocketWriter( - request._protocol, transport, compress=compress, notakeover=notakeover - ) - - return protocol, writer - - def _post_start( - self, request: BaseRequest, protocol: str, writer: WebSocketWriter - ) -> None: - self._ws_protocol = protocol - self._writer = writer - - self._reset_heartbeat() - - loop = self._loop - assert loop is not None - self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop) - request.protocol.set_parser( - WebSocketReader(self._reader, self._max_msg_size, compress=self._compress) - ) - # disable HTTP keepalive for WebSocket - request.protocol.keep_alive(False) - - def can_prepare(self, request: BaseRequest) -> WebSocketReady: - if self._writer is not None: - raise RuntimeError("Already started") - try: - _, protocol, _, _ = self._handshake(request) - except HTTPException: - return WebSocketReady(False, None) - else: - return WebSocketReady(True, protocol) - - @property - def closed(self) -> bool: - return self._closed - - @property - def close_code(self) -> Optional[int]: - return self._close_code - - @property - def ws_protocol(self) 
-> Optional[str]: - return self._ws_protocol - - @property - def compress(self) -> bool: - return self._compress - - def get_extra_info(self, name: str, default: Any = None) -> Any: - """Get optional transport information. - - If no value associated with ``name`` is found, ``default`` is returned. - """ - writer = self._writer - if writer is None: - return default - transport = writer.transport - if transport is None: - return default - return transport.get_extra_info(name, default) - - def exception(self) -> Optional[BaseException]: - return self._exception - - async def ping(self, message: bytes = b"") -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.ping(message) - - async def pong(self, message: bytes = b"") -> None: - # unsolicited pong - if self._writer is None: - raise RuntimeError("Call .prepare() first") - await self._writer.pong(message) - - async def send_str(self, data: str, compress: Optional[bool] = None) -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - if not isinstance(data, str): - raise TypeError("data argument must be str (%r)" % type(data)) - await self._writer.send(data, binary=False, compress=compress) - - async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None: - if self._writer is None: - raise RuntimeError("Call .prepare() first") - if not isinstance(data, (bytes, bytearray, memoryview)): - raise TypeError("data argument must be byte-ish (%r)" % type(data)) - await self._writer.send(data, binary=True, compress=compress) - - async def send_json( - self, - data: Any, - compress: Optional[bool] = None, - *, - dumps: JSONEncoder = json.dumps, - ) -> None: - await self.send_str(dumps(data), compress=compress) - - async def write_eof(self) -> None: # type: ignore[override] - if self._eof_sent: - return - if self._payload_writer is None: - raise RuntimeError("Response has not been started") - - await self.close() - self._eof_sent = True - - async def close( - self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True - ) -> bool: - """Close websocket connection.""" - if self._writer is None: - raise RuntimeError("Call .prepare() first") - - self._cancel_heartbeat() - reader = self._reader - assert reader is not None - - # we need to break `receive()` cycle first, - # `close()` may be called from different task - if self._waiting is not None and not self._closed: - reader.feed_data(WS_CLOSING_MESSAGE, 0) - await self._waiting - - if self._closed: - return False - - self._closed = True - try: - await self._writer.close(code, message) - writer = self._payload_writer - assert writer is not None - if drain: - await writer.drain() - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - raise - except Exception as exc: - self._exception = exc - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - return True - - if self._closing: - return True - - reader = self._reader - assert reader is not None - try: - async with async_timeout.timeout(self._timeout): - msg = await reader.read() - except asyncio.CancelledError: - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - raise - except Exception as exc: - self._exception = exc - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - return True - - if msg.type == WSMsgType.CLOSE: - self._set_code_close_transport(msg.data) - return True - - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - 
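# Illustrative sketch, not part of the deleted sources: typical server-side use of
# WebSocketResponse — prepare() performs the handshake defined above, and iterating
# the response yields messages while the heartbeat/close machinery runs underneath.
from aiohttp import web


async def ws_handler(request: web.Request) -> web.WebSocketResponse:
    ws = web.WebSocketResponse(heartbeat=30.0)
    await ws.prepare(request)              # handshake + protocol switch to WebSocket
    async for msg in ws:                   # iteration stops on CLOSE/CLOSING/CLOSED
        if msg.type == web.WSMsgType.TEXT:
            await ws.send_str(msg.data.upper())
    return ws


app = web.Application()
app.router.add_get("/ws", ws_handler)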
self._exception = asyncio.TimeoutError() - return True - - def _set_code_close_transport(self, code: WSCloseCode) -> None: - """Set the close code and close the transport.""" - self._close_code = code - if self._req is not None and self._req.transport is not None: - self._req.transport.close() - - async def receive(self, timeout: Optional[float] = None) -> WSMessage: - if self._reader is None: - raise RuntimeError("Call .prepare() first") - - loop = self._loop - assert loop is not None - while True: - if self._waiting is not None: - raise RuntimeError("Concurrent call to receive() is not allowed") - - if self._closed: - self._conn_lost += 1 - if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: - raise RuntimeError("WebSocket connection is closed.") - return WS_CLOSED_MESSAGE - elif self._closing: - return WS_CLOSING_MESSAGE - - try: - self._waiting = loop.create_future() - try: - async with async_timeout.timeout(timeout or self._receive_timeout): - msg = await self._reader.read() - self._reset_heartbeat() - finally: - waiter = self._waiting - set_result(waiter, True) - self._waiting = None - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) - raise - except EofStream: - self._close_code = WSCloseCode.OK - await self.close() - return WSMessage(WSMsgType.CLOSED, None, None) - except WebSocketError as exc: - self._close_code = exc.code - await self.close(code=exc.code) - return WSMessage(WSMsgType.ERROR, exc, None) - except Exception as exc: - self._exception = exc - self._closing = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - await self.close() - return WSMessage(WSMsgType.ERROR, exc, None) - - if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data - # Could be closed while awaiting reader. - if not self._closed and self._autoclose: - # The client is likely going to close the - # connection out from under us so we do not - # want to drain any pending writes as it will - # likely result writing to a broken pipe. 
- await self.close(drain=False) - elif msg.type == WSMsgType.CLOSING: - self._closing = True - elif msg.type == WSMsgType.PING and self._autoping: - await self.pong(msg.data) - continue - elif msg.type == WSMsgType.PONG and self._autoping: - continue - - return msg - - async def receive_str(self, *, timeout: Optional[float] = None) -> str: - msg = await self.receive(timeout) - if msg.type != WSMsgType.TEXT: - raise TypeError( - "Received message {}:{!r} is not WSMsgType.TEXT".format( - msg.type, msg.data - ) - ) - return cast(str, msg.data) - - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: - msg = await self.receive(timeout) - if msg.type != WSMsgType.BINARY: - raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes") - return cast(bytes, msg.data) - - async def receive_json( - self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None - ) -> Any: - data = await self.receive_str(timeout=timeout) - return loads(data) - - async def write(self, data: bytes) -> None: - raise RuntimeError("Cannot call .write() for websocket") - - def __aiter__(self) -> "WebSocketResponse": - return self - - async def __anext__(self) -> WSMessage: - msg = await self.receive() - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): - raise StopAsyncIteration - return msg - - def _cancel(self, exc: BaseException) -> None: - if self._reader is not None: - self._reader.set_exception(exc) diff --git a/.venv/Lib/site-packages/aiohttp/worker.py b/.venv/Lib/site-packages/aiohttp/worker.py deleted file mode 100644 index 9b30769..0000000 --- a/.venv/Lib/site-packages/aiohttp/worker.py +++ /dev/null @@ -1,247 +0,0 @@ -"""Async gunicorn worker for aiohttp.web""" - -import asyncio -import os -import re -import signal -import sys -from types import FrameType -from typing import Any, Awaitable, Callable, Optional, Union # noqa - -from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat -from gunicorn.workers import base - -from aiohttp import web - -from .helpers import set_result -from .web_app import Application -from .web_log import AccessLogger - -try: - import ssl - - SSLContext = ssl.SSLContext -except ImportError: # pragma: no cover - ssl = None # type: ignore[assignment] - SSLContext = object # type: ignore[misc,assignment] - - -__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker") - - -class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] - - DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT - DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default - - def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover - super().__init__(*args, **kw) - - self._task: Optional[asyncio.Task[None]] = None - self.exit_code = 0 - self._notify_waiter: Optional[asyncio.Future[bool]] = None - - def init_process(self) -> None: - # create new event_loop after fork - asyncio.get_event_loop().close() - - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - super().init_process() - - def run(self) -> None: - self._task = self.loop.create_task(self._run()) - - try: # ignore all finalization problems - self.loop.run_until_complete(self._task) - except Exception: - self.log.exception("Exception in gunicorn worker") - self.loop.run_until_complete(self.loop.shutdown_asyncgens()) - self.loop.close() - - sys.exit(self.exit_code) - - async def _run(self) -> None: - runner = None - if isinstance(self.wsgi, Application): - app = self.wsgi - elif asyncio.iscoroutinefunction(self.wsgi): - 
wsgi = await self.wsgi() - if isinstance(wsgi, web.AppRunner): - runner = wsgi - app = runner.app - else: - app = wsgi - else: - raise RuntimeError( - "wsgi app should be either Application or " - "async function returning Application, got {}".format(self.wsgi) - ) - - if runner is None: - access_log = self.log.access_log if self.cfg.accesslog else None - runner = web.AppRunner( - app, - logger=self.log, - keepalive_timeout=self.cfg.keepalive, - access_log=access_log, - access_log_format=self._get_valid_log_format( - self.cfg.access_log_format - ), - shutdown_timeout=self.cfg.graceful_timeout / 100 * 95, - ) - await runner.setup() - - ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None - - runner = runner - assert runner is not None - server = runner.server - assert server is not None - for sock in self.sockets: - site = web.SockSite( - runner, - sock, - ssl_context=ctx, - ) - await site.start() - - # If our parent changed then we shut down. - pid = os.getpid() - try: - while self.alive: # type: ignore[has-type] - self.notify() - - cnt = server.requests_count - if self.max_requests and cnt > self.max_requests: - self.alive = False - self.log.info("Max requests, shutting down: %s", self) - - elif pid == os.getpid() and self.ppid != os.getppid(): - self.alive = False - self.log.info("Parent changed, shutting down: %s", self) - else: - await self._wait_next_notify() - except BaseException: - pass - - await runner.cleanup() - - def _wait_next_notify(self) -> "asyncio.Future[bool]": - self._notify_waiter_done() - - loop = self.loop - assert loop is not None - self._notify_waiter = waiter = loop.create_future() - self.loop.call_later(1.0, self._notify_waiter_done, waiter) - - return waiter - - def _notify_waiter_done( - self, waiter: Optional["asyncio.Future[bool]"] = None - ) -> None: - if waiter is None: - waiter = self._notify_waiter - if waiter is not None: - set_result(waiter, True) - - if waiter is self._notify_waiter: - self._notify_waiter = None - - def init_signals(self) -> None: - # Set up signals through the event loop API. - - self.loop.add_signal_handler( - signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None - ) - - self.loop.add_signal_handler( - signal.SIGTERM, self.handle_exit, signal.SIGTERM, None - ) - - self.loop.add_signal_handler( - signal.SIGINT, self.handle_quit, signal.SIGINT, None - ) - - self.loop.add_signal_handler( - signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None - ) - - self.loop.add_signal_handler( - signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None - ) - - self.loop.add_signal_handler( - signal.SIGABRT, self.handle_abort, signal.SIGABRT, None - ) - - # Don't let SIGTERM and SIGUSR1 disturb active requests - # by interrupting system calls - signal.siginterrupt(signal.SIGTERM, False) - signal.siginterrupt(signal.SIGUSR1, False) - # Reset signals so Gunicorn doesn't swallow subprocess return codes - # See: https://github.com/aio-libs/aiohttp/issues/6130 - - def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None: - self.alive = False - - # worker_int callback - self.cfg.worker_int(self) - - # wakeup closing process - self._notify_waiter_done() - - def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None: - self.alive = False - self.exit_code = 1 - self.cfg.worker_abort(self) - sys.exit(1) - - @staticmethod - def _create_ssl_context(cfg: Any) -> "SSLContext": - """Creates SSLContext instance for usage in asyncio.create_server. - - See ssl.SSLSocket.__init__ for more details. 
- """ - if ssl is None: # pragma: no cover - raise RuntimeError("SSL is not supported.") - - ctx = ssl.SSLContext(cfg.ssl_version) - ctx.load_cert_chain(cfg.certfile, cfg.keyfile) - ctx.verify_mode = cfg.cert_reqs - if cfg.ca_certs: - ctx.load_verify_locations(cfg.ca_certs) - if cfg.ciphers: - ctx.set_ciphers(cfg.ciphers) - return ctx - - def _get_valid_log_format(self, source_format: str) -> str: - if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: - return self.DEFAULT_AIOHTTP_LOG_FORMAT - elif re.search(r"%\([^\)]+\)", source_format): - raise ValueError( - "Gunicorn's style options in form of `%(name)s` are not " - "supported for the log formatting. Please use aiohttp's " - "format specification to configure access log formatting: " - "http://docs.aiohttp.org/en/stable/logging.html" - "#format-specification" - ) - else: - return source_format - - -class GunicornUVLoopWebWorker(GunicornWebWorker): - def init_process(self) -> None: - import uvloop - - # Close any existing event loop before setting a - # new policy. - asyncio.get_event_loop().close() - - # Setup uvloop policy, so that every - # asyncio.get_event_loop() will create an instance - # of uvloop event loop. - asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) - - super().init_process() diff --git a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/INSTALLER b/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/LICENSE b/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/LICENSE deleted file mode 100644 index 7082a2d..0000000 --- a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2013-2019 Nikolay Kim and Andrew Svetlov - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/METADATA b/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/METADATA deleted file mode 100644 index fc96452..0000000 --- a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/METADATA +++ /dev/null @@ -1,128 +0,0 @@ -Metadata-Version: 2.1 -Name: aiosignal -Version: 1.3.1 -Summary: aiosignal: a list of registered asynchronous callbacks -Home-page: https://github.com/aio-libs/aiosignal -Maintainer: aiohttp team -Maintainer-email: team@aiohttp.org -License: Apache 2.0 -Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby -Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiosignal/actions -Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiosignal -Project-URL: Docs: RTD, https://docs.aiosignal.org -Project-URL: GitHub: issues, https://github.com/aio-libs/aiosignal/issues -Project-URL: GitHub: repo, https://github.com/aio-libs/aiosignal -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Framework :: AsyncIO -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: frozenlist (>=1.1.0) - -========= -aiosignal -========= - -.. image:: https://github.com/aio-libs/aiosignal/workflows/CI/badge.svg - :target: https://github.com/aio-libs/aiosignal/actions?query=workflow%3ACI - :alt: GitHub status for master branch - -.. image:: https://codecov.io/gh/aio-libs/aiosignal/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aio-libs/aiosignal - :alt: codecov.io status for master branch - -.. image:: https://badge.fury.io/py/aiosignal.svg - :target: https://pypi.org/project/aiosignal - :alt: Latest PyPI package version - -.. image:: https://readthedocs.org/projects/aiosignal/badge/?version=latest - :target: https://aiosignal.readthedocs.io/ - :alt: Latest Read The Docs - -.. image:: https://img.shields.io/discourse/topics?server=https%3A%2F%2Faio-libs.discourse.group%2F - :target: https://aio-libs.discourse.group/ - :alt: Discourse group for io-libs - -.. image:: https://badges.gitter.im/Join%20Chat.svg - :target: https://gitter.im/aio-libs/Lobby - :alt: Chat on Gitter - -Introduction -============ - -A project to manage callbacks in `asyncio` projects. - -``Signal`` is a list of registered asynchronous callbacks. - -The signal's life-cycle has two stages: after creation its content -could be filled by using standard list operations: ``sig.append()`` -etc. - -After you call ``sig.freeze()`` the signal is *frozen*: adding, removing -and dropping callbacks is forbidden. - -The only available operation is calling the previously registered -callbacks by using ``await sig.send(data)``. - -For concrete usage examples see the `Signals - -section of the `Web Server Advanced -` chapter of the `aiohttp -documentation`_. 
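The fill-then-freeze life-cycle that the introduction above describes is easiest to see in a short sketch. The snippet below is only an illustration built on the Signal API shown in this package's deleted __init__.py (append the callbacks, call freeze(), then await send()); the owner string and callback name are invented for the example:

    import asyncio

    from aiosignal import Signal


    async def on_startup(app_name):
        # illustrative callback; the name and argument are made up for this sketch
        print(f"starting {app_name}")


    async def main():
        sig = Signal(owner="demo")   # owner is only used in Signal's repr()
        sig.append(on_startup)       # stage 1: fill the signal with list operations
        sig.freeze()                 # stage 2: freeze it; further mutation is forbidden
        await sig.send("demo")       # invoke every registered callback in order


    asyncio.run(main())

send() raises a RuntimeError on a non-frozen signal, which is why freeze() has to come before the first send() in this sketch.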
- - -Installation ------------- - -:: - - $ pip install aiosignal - -The library requires Python 3.6 or newer. - - -Documentation -============= - -https://aiosignal.readthedocs.io/ - -Communication channels -====================== - -*gitter chat* https://gitter.im/aio-libs/Lobby - -Requirements -============ - -- Python >= 3.6 -- frozenlist >= 1.0.0 - -License -======= - -``aiosignal`` is offered under the Apache 2 license. - -Source code -=========== - -The project is hosted on GitHub_ - -Please file an issue in the `bug tracker -`_ if you have found a bug -or have some suggestions to improve the library. - -.. _GitHub: https://github.com/aio-libs/aiosignal -.. _aiohttp documentation: https://docs.aiohttp.org/ diff --git a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/RECORD b/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/RECORD deleted file mode 100644 index 59be23b..0000000 --- a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -aiosignal-1.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aiosignal-1.3.1.dist-info/LICENSE,sha256=b9UkPpLdf5jsacesN3co50kFcJ_1J6W_mNbQJjwE9bY,11332 -aiosignal-1.3.1.dist-info/METADATA,sha256=c0HRnlYzfXKztZPTFDlPfygizTherhG5WdwXlvco0Ug,4008 -aiosignal-1.3.1.dist-info/RECORD,, -aiosignal-1.3.1.dist-info/WHEEL,sha256=ZL1lC_LiPDNRgDnOl2taCMc83aPEUZgHHv2h-LDgdiM,92 -aiosignal-1.3.1.dist-info/top_level.txt,sha256=z45aNOKGDdrI1roqZY3BGXQ22kJFPHBmVdwtLYLtXC0,10 -aiosignal/__init__.py,sha256=zQNfFYRSd84bswvpFv8ZWjEr5DeYwV3LXbMSyo2222s,867 -aiosignal/__init__.pyi,sha256=xeCddYSS8fZAkz8S4HuKSR2IDe3N7RW_LKcXDPPA1Xk,311 -aiosignal/__pycache__/__init__.cpython-311.pyc,, -aiosignal/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/WHEEL b/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/WHEEL deleted file mode 100644 index 5e1f087..0000000 --- a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/top_level.txt b/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/top_level.txt deleted file mode 100644 index ac6df3a..0000000 --- a/.venv/Lib/site-packages/aiosignal-1.3.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -aiosignal diff --git a/.venv/Lib/site-packages/aiosignal/__init__.py b/.venv/Lib/site-packages/aiosignal/__init__.py deleted file mode 100644 index 3d288e6..0000000 --- a/.venv/Lib/site-packages/aiosignal/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -from frozenlist import FrozenList - -__version__ = "1.3.1" - -__all__ = ("Signal",) - - -class Signal(FrozenList): - """Coroutine-based signal implementation. - - To connect a callback to a signal, use any list method. - - Signals are fired using the send() coroutine, which takes named - arguments. - """ - - __slots__ = ("_owner",) - - def __init__(self, owner): - super().__init__() - self._owner = owner - - def __repr__(self): - return "".format( - self._owner, self.frozen, list(self) - ) - - async def send(self, *args, **kwargs): - """ - Sends data to all registered receivers. 
- """ - if not self.frozen: - raise RuntimeError("Cannot send non-frozen signal.") - - for receiver in self: - await receiver(*args, **kwargs) # type: ignore diff --git a/.venv/Lib/site-packages/aiosignal/__init__.pyi b/.venv/Lib/site-packages/aiosignal/__init__.pyi deleted file mode 100644 index d4e3416..0000000 --- a/.venv/Lib/site-packages/aiosignal/__init__.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any, Generic, TypeVar - -from frozenlist import FrozenList - -__all__ = ("Signal",) - -_T = TypeVar("_T") - -class Signal(FrozenList[_T], Generic[_T]): - def __init__(self, owner: Any) -> None: ... - def __repr__(self) -> str: ... - async def send(self, *args: Any, **kwargs: Any) -> None: ... diff --git a/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index c57522e..0000000 Binary files a/.venv/Lib/site-packages/aiosignal/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/aiosignal/py.typed b/.venv/Lib/site-packages/aiosignal/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/Lib/site-packages/attr/__init__.py b/.venv/Lib/site-packages/attr/__init__.py deleted file mode 100644 index 9226258..0000000 --- a/.venv/Lib/site-packages/attr/__init__.py +++ /dev/null @@ -1,134 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Classes Without Boilerplate -""" - -from functools import partial -from typing import Callable - -from . import converters, exceptions, filters, setters, validators -from ._cmp import cmp_using -from ._compat import Protocol -from ._config import get_run_validators, set_run_validators -from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types -from ._make import ( - NOTHING, - Attribute, - Factory, - attrib, - attrs, - fields, - fields_dict, - make_class, - validate, -) -from ._next_gen import define, field, frozen, mutable -from ._version_info import VersionInfo - - -s = attributes = attrs -ib = attr = attrib -dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) - - -class AttrsInstance(Protocol): - pass - - -__all__ = [ - "Attribute", - "AttrsInstance", - "Factory", - "NOTHING", - "asdict", - "assoc", - "astuple", - "attr", - "attrib", - "attributes", - "attrs", - "cmp_using", - "converters", - "define", - "evolve", - "exceptions", - "field", - "fields", - "fields_dict", - "filters", - "frozen", - "get_run_validators", - "has", - "ib", - "make_class", - "mutable", - "resolve_types", - "s", - "set_run_validators", - "setters", - "validate", - "validators", -] - - -def _make_getattr(mod_name: str) -> Callable: - """ - Create a metadata proxy for packaging information that uses *mod_name* in - its warnings and errors. 
- """ - - def __getattr__(name: str) -> str: - dunder_to_metadata = { - "__title__": "Name", - "__copyright__": "", - "__version__": "version", - "__version_info__": "version", - "__description__": "summary", - "__uri__": "", - "__url__": "", - "__author__": "", - "__email__": "", - "__license__": "license", - } - if name not in dunder_to_metadata: - msg = f"module {mod_name} has no attribute {name}" - raise AttributeError(msg) - - import sys - import warnings - - if sys.version_info < (3, 8): - from importlib_metadata import metadata - else: - from importlib.metadata import metadata - - if name not in ("__version__", "__version_info__"): - warnings.warn( - f"Accessing {mod_name}.{name} is deprecated and will be " - "removed in a future release. Use importlib.metadata directly " - "to query for attrs's packaging metadata.", - DeprecationWarning, - stacklevel=2, - ) - - meta = metadata("attrs") - if name == "__license__": - return "MIT" - if name == "__copyright__": - return "Copyright (c) 2015 Hynek Schlawack" - if name in ("__uri__", "__url__"): - return meta["Project-URL"].split(" ", 1)[-1] - if name == "__version_info__": - return VersionInfo._from_version_string(meta["version"]) - if name == "__author__": - return meta["Author-email"].rsplit(" ", 1)[0] - if name == "__email__": - return meta["Author-email"].rsplit("<", 1)[1][:-1] - - return meta[dunder_to_metadata[name]] - - return __getattr__ - - -__getattr__ = _make_getattr(__name__) diff --git a/.venv/Lib/site-packages/attr/__init__.pyi b/.venv/Lib/site-packages/attr/__init__.pyi deleted file mode 100644 index 37a2087..0000000 --- a/.venv/Lib/site-packages/attr/__init__.pyi +++ /dev/null @@ -1,555 +0,0 @@ -import enum -import sys - -from typing import ( - Any, - Callable, - Dict, - Generic, - List, - Mapping, - Optional, - Protocol, - Sequence, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -# `import X as X` is required to make these public -from . import converters as converters -from . import exceptions as exceptions -from . import filters as filters -from . import setters as setters -from . import validators as validators -from ._cmp import cmp_using as cmp_using -from ._typing_compat import AttrsInstance_ -from ._version_info import VersionInfo - -if sys.version_info >= (3, 10): - from typing import TypeGuard -else: - from typing_extensions import TypeGuard - -if sys.version_info >= (3, 11): - from typing import dataclass_transform -else: - from typing_extensions import dataclass_transform - -__version__: str -__version_info__: VersionInfo -__title__: str -__description__: str -__url__: str -__uri__: str -__author__: str -__email__: str -__license__: str -__copyright__: str - -_T = TypeVar("_T") -_C = TypeVar("_C", bound=type) - -_EqOrderType = Union[bool, Callable[[Any], Any]] -_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] -_ConverterType = Callable[[Any], Any] -_FilterType = Callable[["Attribute[_T]", _T], bool] -_ReprType = Callable[[Any], str] -_ReprArgType = Union[bool, _ReprType] -_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] -_OnSetAttrArgType = Union[ - _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType -] -_FieldTransformer = Callable[ - [type, List["Attribute[Any]"]], List["Attribute[Any]"] -] -# FIXME: in reality, if multiple validators are passed they must be in a list -# or tuple, but those are invariant and so would prevent subtypes of -# _ValidatorType from working when passed in a list or tuple. 
-_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] - -# We subclass this here to keep the protocol's qualified name clean. -class AttrsInstance(AttrsInstance_, Protocol): - pass - -_A = TypeVar("_A", bound=type[AttrsInstance]) - -class _Nothing(enum.Enum): - NOTHING = enum.auto() - -NOTHING = _Nothing.NOTHING - -# NOTE: Factory lies about its return type to make this possible: -# `x: List[int] # = Factory(list)` -# Work around mypy issue #4554 in the common case by using an overload. -if sys.version_info >= (3, 8): - from typing import Literal - @overload - def Factory(factory: Callable[[], _T]) -> _T: ... - @overload - def Factory( - factory: Callable[[Any], _T], - takes_self: Literal[True], - ) -> _T: ... - @overload - def Factory( - factory: Callable[[], _T], - takes_self: Literal[False], - ) -> _T: ... - -else: - @overload - def Factory(factory: Callable[[], _T]) -> _T: ... - @overload - def Factory( - factory: Union[Callable[[Any], _T], Callable[[], _T]], - takes_self: bool = ..., - ) -> _T: ... - -class Attribute(Generic[_T]): - name: str - default: Optional[_T] - validator: Optional[_ValidatorType[_T]] - repr: _ReprArgType - cmp: _EqOrderType - eq: _EqOrderType - order: _EqOrderType - hash: Optional[bool] - init: bool - converter: Optional[_ConverterType] - metadata: Dict[Any, Any] - type: Optional[Type[_T]] - kw_only: bool - on_setattr: _OnSetAttrType - alias: Optional[str] - - def evolve(self, **changes: Any) -> "Attribute[Any]": ... - -# NOTE: We had several choices for the annotation to use for type arg: -# 1) Type[_T] -# - Pros: Handles simple cases correctly -# - Cons: Might produce less informative errors in the case of conflicting -# TypeVars e.g. `attr.ib(default='bad', type=int)` -# 2) Callable[..., _T] -# - Pros: Better error messages than #1 for conflicting TypeVars -# - Cons: Terrible error messages for validator checks. -# e.g. attr.ib(type=int, validator=validate_str) -# -> error: Cannot infer function type argument -# 3) type (and do all of the work in the mypy plugin) -# - Pros: Simple here, and we could customize the plugin with our own errors. -# - Cons: Would need to write mypy plugin code to handle all the cases. -# We chose option #1. - -# `attr` lies about its return type to make the following possible: -# attr() -> Any -# attr(8) -> int -# attr(validator=) -> Whatever the callable expects. -# This makes this type of assignments possible: -# x: int = attr(8) -# -# This form catches explicit None or no default but with no other arguments -# returns Any. -@overload -def attrib( - default: None = ..., - validator: None = ..., - repr: _ReprArgType = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: None = ..., - converter: None = ..., - factory: None = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., -) -> Any: ... - -# This form catches an explicit None or no default and infers the type from the -# other arguments. 
-@overload -def attrib( - default: None = ..., - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: Optional[Type[_T]] = ..., - converter: Optional[_ConverterType] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., -) -> _T: ... - -# This form catches an explicit default argument. -@overload -def attrib( - default: _T, - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: Optional[Type[_T]] = ..., - converter: Optional[_ConverterType] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., -) -> _T: ... - -# This form covers type=non-Type: e.g. forward references (str), Any -@overload -def attrib( - default: Optional[_T] = ..., - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: object = ..., - converter: Optional[_ConverterType] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., -) -> Any: ... -@overload -def field( - *, - default: None = ..., - validator: None = ..., - repr: _ReprArgType = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - converter: None = ..., - factory: None = ..., - kw_only: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., - type: Optional[type] = ..., -) -> Any: ... - -# This form catches an explicit None or no default and infers the type from the -# other arguments. -@overload -def field( - *, - default: None = ..., - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - converter: Optional[_ConverterType] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., - type: Optional[type] = ..., -) -> _T: ... - -# This form catches an explicit default argument. -@overload -def field( - *, - default: _T, - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - converter: Optional[_ConverterType] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., - type: Optional[type] = ..., -) -> _T: ... 
- -# This form covers type=non-Type: e.g. forward references (str), Any -@overload -def field( - *, - default: Optional[_T] = ..., - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - converter: Optional[_ConverterType] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - alias: Optional[str] = ..., - type: Optional[type] = ..., -) -> Any: ... -@overload -@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) -def attrs( - maybe_cls: _C, - these: Optional[Dict[str, Any]] = ..., - repr_ns: Optional[str] = ..., - repr: bool = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - auto_detect: bool = ..., - collect_by_mro: bool = ..., - getstate_setstate: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., - match_args: bool = ..., - unsafe_hash: Optional[bool] = ..., -) -> _C: ... -@overload -@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) -def attrs( - maybe_cls: None = ..., - these: Optional[Dict[str, Any]] = ..., - repr_ns: Optional[str] = ..., - repr: bool = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - auto_detect: bool = ..., - collect_by_mro: bool = ..., - getstate_setstate: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., - match_args: bool = ..., - unsafe_hash: Optional[bool] = ..., -) -> Callable[[_C], _C]: ... -@overload -@dataclass_transform(field_specifiers=(attrib, field)) -def define( - maybe_cls: _C, - *, - these: Optional[Dict[str, Any]] = ..., - repr: bool = ..., - unsafe_hash: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., - auto_detect: bool = ..., - getstate_setstate: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., - match_args: bool = ..., -) -> _C: ... 
-@overload -@dataclass_transform(field_specifiers=(attrib, field)) -def define( - maybe_cls: None = ..., - *, - these: Optional[Dict[str, Any]] = ..., - repr: bool = ..., - unsafe_hash: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., - auto_detect: bool = ..., - getstate_setstate: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., - match_args: bool = ..., -) -> Callable[[_C], _C]: ... - -mutable = define - -@overload -@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) -def frozen( - maybe_cls: _C, - *, - these: Optional[Dict[str, Any]] = ..., - repr: bool = ..., - unsafe_hash: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., - auto_detect: bool = ..., - getstate_setstate: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., - match_args: bool = ..., -) -> _C: ... -@overload -@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) -def frozen( - maybe_cls: None = ..., - *, - these: Optional[Dict[str, Any]] = ..., - repr: bool = ..., - unsafe_hash: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., - auto_detect: bool = ..., - getstate_setstate: Optional[bool] = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., - match_args: bool = ..., -) -> Callable[[_C], _C]: ... -def fields(cls: Type[AttrsInstance]) -> Any: ... -def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... -def validate(inst: AttrsInstance) -> None: ... -def resolve_types( - cls: _A, - globalns: Optional[Dict[str, Any]] = ..., - localns: Optional[Dict[str, Any]] = ..., - attribs: Optional[List[Attribute[Any]]] = ..., - include_extras: bool = ..., -) -> _A: ... - -# TODO: add support for returning a proper attrs class from the mypy plugin -# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', -# [attr.ib()])` is valid -def make_class( - name: str, - attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], - bases: Tuple[type, ...] = ..., - class_body: Optional[Dict[str, Any]] = ..., - repr_ns: Optional[str] = ..., - repr: bool = ..., - cmp: Optional[_EqOrderType] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[_EqOrderType] = ..., - order: Optional[_EqOrderType] = ..., - collect_by_mro: bool = ..., - on_setattr: Optional[_OnSetAttrArgType] = ..., - field_transformer: Optional[_FieldTransformer] = ..., -) -> type: ... 
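The stub overloads above spell out both the modern define/field API and the classic attrs/attrib one. The following sketch only illustrates how those signatures are used in practice; the Point and LegacyPoint classes are invented for the example:

    import attr


    @attr.define                     # modern API: slots classes, auto_attribs detected
    class Point:
        x: int = attr.field(validator=attr.validators.instance_of(int))
        y: int = attr.field(default=0)


    @attr.s(auto_attribs=True)       # classic API, matching the attrs/attrib overloads
    class LegacyPoint:
        x: int = attr.ib(default=0)
        y: int = attr.ib(default=0)


    p = Point(1, 2)
    assert attr.asdict(p) == {"x": 1, "y": 2}
    assert attr.fields(Point)[0].name == "x"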
- -# _funcs -- - -# TODO: add support for returning TypedDict from the mypy plugin -# FIXME: asdict/astuple do not honor their factory args. Waiting on one of -# these: -# https://github.com/python/mypy/issues/4236 -# https://github.com/python/typing/issues/253 -# XXX: remember to fix attrs.asdict/astuple too! -def asdict( - inst: AttrsInstance, - recurse: bool = ..., - filter: Optional[_FilterType[Any]] = ..., - dict_factory: Type[Mapping[Any, Any]] = ..., - retain_collection_types: bool = ..., - value_serializer: Optional[ - Callable[[type, Attribute[Any], Any], Any] - ] = ..., - tuple_keys: Optional[bool] = ..., -) -> Dict[str, Any]: ... - -# TODO: add support for returning NamedTuple from the mypy plugin -def astuple( - inst: AttrsInstance, - recurse: bool = ..., - filter: Optional[_FilterType[Any]] = ..., - tuple_factory: Type[Sequence[Any]] = ..., - retain_collection_types: bool = ..., -) -> Tuple[Any, ...]: ... -def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ... -def assoc(inst: _T, **changes: Any) -> _T: ... -def evolve(inst: _T, **changes: Any) -> _T: ... - -# _config -- - -def set_run_validators(run: bool) -> None: ... -def get_run_validators() -> bool: ... - -# aliases -- - -s = attributes = attrs -ib = attr = attrib -dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index fb2628d..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc deleted file mode 100644 index da5b406..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_cmp.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc deleted file mode 100644 index 1909b8e..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_compat.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc deleted file mode 100644 index 2957d23..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_config.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc deleted file mode 100644 index a9faa1b..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_funcs.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc deleted file mode 100644 index 1f3b30d..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_make.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc deleted file mode 100644 index 6ebc026..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc deleted file mode 
100644 index a5bd70f..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/_version_info.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc deleted file mode 100644 index 6643ca5..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/converters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc deleted file mode 100644 index 8de558e..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/exceptions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc deleted file mode 100644 index b29fe4a..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/filters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc deleted file mode 100644 index 4a79a06..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/setters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc b/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc deleted file mode 100644 index a304ae0..0000000 Binary files a/.venv/Lib/site-packages/attr/__pycache__/validators.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attr/_cmp.py b/.venv/Lib/site-packages/attr/_cmp.py deleted file mode 100644 index a4a35e0..0000000 --- a/.venv/Lib/site-packages/attr/_cmp.py +++ /dev/null @@ -1,150 +0,0 @@ -# SPDX-License-Identifier: MIT - - -import functools -import types - -from ._make import _make_ne - - -_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} - - -def cmp_using( - eq=None, - lt=None, - le=None, - gt=None, - ge=None, - require_same_type=True, - class_name="Comparable", -): - """ - Create a class that can be passed into `attrs.field`'s ``eq``, ``order``, - and ``cmp`` arguments to customize field comparison. - - The resulting class will have a full set of ordering methods if at least - one of ``{lt, le, gt, ge}`` and ``eq`` are provided. - - :param Optional[callable] eq: `callable` used to evaluate equality of two - objects. - :param Optional[callable] lt: `callable` used to evaluate whether one - object is less than another object. - :param Optional[callable] le: `callable` used to evaluate whether one - object is less than or equal to another object. - :param Optional[callable] gt: `callable` used to evaluate whether one - object is greater than another object. - :param Optional[callable] ge: `callable` used to evaluate whether one - object is greater than or equal to another object. - - :param bool require_same_type: When `True`, equality and ordering methods - will return `NotImplemented` if objects are not of the same type. - - :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. - - See `comparison` for more details. - - .. versionadded:: 21.1.0 - """ - - body = { - "__slots__": ["value"], - "__init__": _make_init(), - "_requirements": [], - "_is_comparable_to": _is_comparable_to, - } - - # Add operations. 
- num_order_functions = 0 - has_eq_function = False - - if eq is not None: - has_eq_function = True - body["__eq__"] = _make_operator("eq", eq) - body["__ne__"] = _make_ne() - - if lt is not None: - num_order_functions += 1 - body["__lt__"] = _make_operator("lt", lt) - - if le is not None: - num_order_functions += 1 - body["__le__"] = _make_operator("le", le) - - if gt is not None: - num_order_functions += 1 - body["__gt__"] = _make_operator("gt", gt) - - if ge is not None: - num_order_functions += 1 - body["__ge__"] = _make_operator("ge", ge) - - type_ = types.new_class( - class_name, (object,), {}, lambda ns: ns.update(body) - ) - - # Add same type requirement. - if require_same_type: - type_._requirements.append(_check_same_type) - - # Add total ordering if at least one operation was defined. - if 0 < num_order_functions < 4: - if not has_eq_function: - # functools.total_ordering requires __eq__ to be defined, - # so raise early error here to keep a nice stack. - msg = "eq must be define is order to complete ordering from lt, le, gt, ge." - raise ValueError(msg) - type_ = functools.total_ordering(type_) - - return type_ - - -def _make_init(): - """ - Create __init__ method. - """ - - def __init__(self, value): - """ - Initialize object with *value*. - """ - self.value = value - - return __init__ - - -def _make_operator(name, func): - """ - Create operator method. - """ - - def method(self, other): - if not self._is_comparable_to(other): - return NotImplemented - - result = func(self.value, other.value) - if result is NotImplemented: - return NotImplemented - - return result - - method.__name__ = f"__{name}__" - method.__doc__ = ( - f"Return a {_operation_names[name]} b. Computed by attrs." - ) - - return method - - -def _is_comparable_to(self, other): - """ - Check whether `other` is comparable to `self`. - """ - return all(func(self, other) for func in self._requirements) - - -def _check_same_type(self, other): - """ - Return True if *self* and *other* are of the same type, False otherwise. - """ - return other.value.__class__ is self.value.__class__ diff --git a/.venv/Lib/site-packages/attr/_cmp.pyi b/.venv/Lib/site-packages/attr/_cmp.pyi deleted file mode 100644 index f3dcdc1..0000000 --- a/.venv/Lib/site-packages/attr/_cmp.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Any, Callable, Optional, Type - -_CompareWithType = Callable[[Any, Any], bool] - -def cmp_using( - eq: Optional[_CompareWithType] = ..., - lt: Optional[_CompareWithType] = ..., - le: Optional[_CompareWithType] = ..., - gt: Optional[_CompareWithType] = ..., - ge: Optional[_CompareWithType] = ..., - require_same_type: bool = ..., - class_name: str = ..., -) -> Type: ... 
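As the cmp_using docstring above explains, the helper builds a small comparator class that can be handed to field's eq and order arguments to customize how a single field is compared. A minimal sketch, assuming the public attr.cmp_using, attr.define and attr.field entry points, with approx_eq, FloatCmp and Measurement invented for the example:

    import attr


    def approx_eq(a, b):
        # illustrative tolerance-based comparison; not part of attrs itself
        return abs(a - b) < 1e-9


    FloatCmp = attr.cmp_using(eq=approx_eq, class_name="FloatCmp")


    @attr.define
    class Measurement:
        value: float = attr.field(eq=FloatCmp)


    assert Measurement(1.0) == Measurement(1.0 + 1e-12)

Because only eq is supplied here, the generated class gets no ordering methods; per the docstring, at least one of lt/le/gt/ge plus eq would be needed for functools.total_ordering to fill in the rest.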
diff --git a/.venv/Lib/site-packages/attr/_compat.py b/.venv/Lib/site-packages/attr/_compat.py deleted file mode 100644 index 46b05ca..0000000 --- a/.venv/Lib/site-packages/attr/_compat.py +++ /dev/null @@ -1,87 +0,0 @@ -# SPDX-License-Identifier: MIT - -import inspect -import platform -import sys -import threading - -from collections.abc import Mapping, Sequence # noqa: F401 -from typing import _GenericAlias - - -PYPY = platform.python_implementation() == "PyPy" -PY_3_8_PLUS = sys.version_info[:2] >= (3, 8) -PY_3_9_PLUS = sys.version_info[:2] >= (3, 9) -PY310 = sys.version_info[:2] >= (3, 10) -PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) - - -if sys.version_info < (3, 8): - try: - from typing_extensions import Protocol - except ImportError: # pragma: no cover - Protocol = object -else: - from typing import Protocol # noqa: F401 - - -class _AnnotationExtractor: - """ - Extract type annotations from a callable, returning None whenever there - is none. - """ - - __slots__ = ["sig"] - - def __init__(self, callable): - try: - self.sig = inspect.signature(callable) - except (ValueError, TypeError): # inspect failed - self.sig = None - - def get_first_param_type(self): - """ - Return the type annotation of the first argument if it's not empty. - """ - if not self.sig: - return None - - params = list(self.sig.parameters.values()) - if params and params[0].annotation is not inspect.Parameter.empty: - return params[0].annotation - - return None - - def get_return_type(self): - """ - Return the return type if it's not empty. - """ - if ( - self.sig - and self.sig.return_annotation is not inspect.Signature.empty - ): - return self.sig.return_annotation - - return None - - -# Thread-local global to track attrs instances which are already being repr'd. -# This is needed because there is no other (thread-safe) way to pass info -# about the instances that are already being repr'd through the call stack -# in order to ensure we don't perform infinite recursion. -# -# For instance, if an instance contains a dict which contains that instance, -# we need to know that we're already repr'ing the outside instance from within -# the dict's repr() call. -# -# This lives here rather than in _make.py so that the functions in _make.py -# don't have a direct reference to the thread-local in their globals dict. -# If they have such a reference, it breaks cloudpickle. -repr_context = threading.local() - - -def get_generic_base(cl): - """If this is a generic class (A[str]), return the generic base for it.""" - if cl.__class__ is _GenericAlias: - return cl.__origin__ - return None diff --git a/.venv/Lib/site-packages/attr/_config.py b/.venv/Lib/site-packages/attr/_config.py deleted file mode 100644 index 9c245b1..0000000 --- a/.venv/Lib/site-packages/attr/_config.py +++ /dev/null @@ -1,31 +0,0 @@ -# SPDX-License-Identifier: MIT - -__all__ = ["set_run_validators", "get_run_validators"] - -_run_validators = True - - -def set_run_validators(run): - """ - Set whether or not validators are run. By default, they are run. - - .. deprecated:: 21.3.0 It will not be removed, but it also will not be - moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` - instead. - """ - if not isinstance(run, bool): - msg = "'run' must be bool." - raise TypeError(msg) - global _run_validators - _run_validators = run - - -def get_run_validators(): - """ - Return whether or not validators are run. - - .. deprecated:: 21.3.0 It will not be removed, but it also will not be - moved to new ``attrs`` namespace. 
Use `attrs.validators.get_disabled()` - instead. - """ - return _run_validators diff --git a/.venv/Lib/site-packages/attr/_funcs.py b/.venv/Lib/site-packages/attr/_funcs.py deleted file mode 100644 index a888991..0000000 --- a/.venv/Lib/site-packages/attr/_funcs.py +++ /dev/null @@ -1,483 +0,0 @@ -# SPDX-License-Identifier: MIT - - -import copy - -from ._compat import PY_3_9_PLUS, get_generic_base -from ._make import NOTHING, _obj_setattr, fields -from .exceptions import AttrsAttributeNotFoundError - - -def asdict( - inst, - recurse=True, - filter=None, - dict_factory=dict, - retain_collection_types=False, - value_serializer=None, -): - """ - Return the *attrs* attribute values of *inst* as a dict. - - Optionally recurse into other *attrs*-decorated classes. - - :param inst: Instance of an *attrs*-decorated class. - :param bool recurse: Recurse into classes that are also - *attrs*-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attrs.Attribute` as the first argument and the - value as the second argument. - :param callable dict_factory: A callable to produce dictionaries from. For - example, to produce ordered dictionaries instead of normal Python - dictionaries, pass in ``collections.OrderedDict``. - :param bool retain_collection_types: Do not convert to ``list`` when - encountering an attribute whose type is ``tuple`` or ``set``. Only - meaningful if ``recurse`` is ``True``. - :param Optional[callable] value_serializer: A hook that is called for every - attribute or dict key/value. It receives the current instance, field - and value and must return the (updated) value. The hook is run *after* - the optional *filter* has been applied. - - :rtype: return type of *dict_factory* - - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class. - - .. versionadded:: 16.0.0 *dict_factory* - .. versionadded:: 16.1.0 *retain_collection_types* - .. versionadded:: 20.3.0 *value_serializer* - .. versionadded:: 21.3.0 If a dict has a collection for a key, it is - serialized as a tuple. 
- """ - attrs = fields(inst.__class__) - rv = dict_factory() - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - - if value_serializer is not None: - v = value_serializer(inst, a, v) - - if recurse is True: - if has(v.__class__): - rv[a.name] = asdict( - v, - recurse=True, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - elif isinstance(v, (tuple, list, set, frozenset)): - cf = v.__class__ if retain_collection_types is True else list - items = [ - _asdict_anything( - i, - is_key=False, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - for i in v - ] - try: - rv[a.name] = cf(items) - except TypeError: - if not issubclass(cf, tuple): - raise - # Workaround for TypeError: cf.__new__() missing 1 required - # positional argument (which appears, for a namedturle) - rv[a.name] = cf(*items) - elif isinstance(v, dict): - df = dict_factory - rv[a.name] = df( - ( - _asdict_anything( - kk, - is_key=True, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - _asdict_anything( - vv, - is_key=False, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - ) - for kk, vv in v.items() - ) - else: - rv[a.name] = v - else: - rv[a.name] = v - return rv - - -def _asdict_anything( - val, - is_key, - filter, - dict_factory, - retain_collection_types, - value_serializer, -): - """ - ``asdict`` only works on attrs instances, this works on anything. - """ - if getattr(val.__class__, "__attrs_attrs__", None) is not None: - # Attrs class. - rv = asdict( - val, - recurse=True, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - elif isinstance(val, (tuple, list, set, frozenset)): - if retain_collection_types is True: - cf = val.__class__ - elif is_key: - cf = tuple - else: - cf = list - - rv = cf( - [ - _asdict_anything( - i, - is_key=False, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - for i in val - ] - ) - elif isinstance(val, dict): - df = dict_factory - rv = df( - ( - _asdict_anything( - kk, - is_key=True, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - _asdict_anything( - vv, - is_key=False, - filter=filter, - dict_factory=df, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ), - ) - for kk, vv in val.items() - ) - else: - rv = val - if value_serializer is not None: - rv = value_serializer(None, None, rv) - - return rv - - -def astuple( - inst, - recurse=True, - filter=None, - tuple_factory=tuple, - retain_collection_types=False, -): - """ - Return the *attrs* attribute values of *inst* as a tuple. - - Optionally recurse into other *attrs*-decorated classes. - - :param inst: Instance of an *attrs*-decorated class. - :param bool recurse: Recurse into classes that are also - *attrs*-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attrs.Attribute` as the first argument and the - value as the second argument. 
- :param callable tuple_factory: A callable to produce tuples from. For - example, to produce lists instead of tuples. - :param bool retain_collection_types: Do not convert to ``list`` - or ``dict`` when encountering an attribute which type is - ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is - ``True``. - - :rtype: return type of *tuple_factory* - - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class. - - .. versionadded:: 16.2.0 - """ - attrs = fields(inst.__class__) - rv = [] - retain = retain_collection_types # Very long. :/ - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - if recurse is True: - if has(v.__class__): - rv.append( - astuple( - v, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - ) - elif isinstance(v, (tuple, list, set, frozenset)): - cf = v.__class__ if retain is True else list - items = [ - astuple( - j, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(j.__class__) - else j - for j in v - ] - try: - rv.append(cf(items)) - except TypeError: - if not issubclass(cf, tuple): - raise - # Workaround for TypeError: cf.__new__() missing 1 required - # positional argument (which appears, for a namedturle) - rv.append(cf(*items)) - elif isinstance(v, dict): - df = v.__class__ if retain is True else dict - rv.append( - df( - ( - astuple( - kk, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(kk.__class__) - else kk, - astuple( - vv, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(vv.__class__) - else vv, - ) - for kk, vv in v.items() - ) - ) - else: - rv.append(v) - else: - rv.append(v) - - return rv if tuple_factory is list else tuple_factory(rv) - - -def has(cls): - """ - Check whether *cls* is a class with *attrs* attributes. - - :param type cls: Class to introspect. - :raise TypeError: If *cls* is not a class. - - :rtype: bool - """ - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is not None: - return True - - # No attrs, maybe it's a specialized generic (A[str])? - generic_base = get_generic_base(cls) - if generic_base is not None: - generic_attrs = getattr(generic_base, "__attrs_attrs__", None) - if generic_attrs is not None: - # Stick it on here for speed next time. - cls.__attrs_attrs__ = generic_attrs - return generic_attrs is not None - return False - - -def assoc(inst, **changes): - """ - Copy *inst* and apply *changes*. - - This is different from `evolve` that applies the changes to the arguments - that create the new instance. - - `evolve`'s behavior is preferable, but there are `edge cases`_ where it - doesn't work. Therefore `assoc` is deprecated, but will not be removed. - - .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251 - - :param inst: Instance of a class with *attrs* attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise attrs.exceptions.AttrsAttributeNotFoundError: If *attr_name* - couldn't be found on *cls*. - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class. - - .. deprecated:: 17.1.0 - Use `attrs.evolve` instead if you can. - This function will not be removed du to the slightly different approach - compared to `attrs.evolve`. 
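As documented in the deleted _funcs.py above, asdict() and astuple() recursively serialize attrs instances. A small sketch under illustrative class names (Parent and Child are not from this repository):

import attr

@attr.s
class Child:
    name = attr.ib()

@attr.s
class Parent:
    child = attr.ib()
    tags = attr.ib(factory=list)

p = Parent(Child("a"), tags=["x", "y"])
# Nested attrs instances are recursed into; other collections become lists/dicts.
assert attr.asdict(p) == {"child": {"name": "a"}, "tags": ["x", "y"]}
assert attr.astuple(p) == (("a",), ["x", "y"])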
- """ - new = copy.copy(inst) - attrs = fields(inst.__class__) - for k, v in changes.items(): - a = getattr(attrs, k, NOTHING) - if a is NOTHING: - msg = f"{k} is not an attrs attribute on {new.__class__}." - raise AttrsAttributeNotFoundError(msg) - _obj_setattr(new, k, v) - return new - - -def evolve(*args, **changes): - """ - Create a new instance, based on the first positional argument with - *changes* applied. - - :param inst: Instance of a class with *attrs* attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise TypeError: If *attr_name* couldn't be found in the class - ``__init__``. - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class. - - .. versionadded:: 17.1.0 - .. deprecated:: 23.1.0 - It is now deprecated to pass the instance using the keyword argument - *inst*. It will raise a warning until at least April 2024, after which - it will become an error. Always pass the instance as a positional - argument. - """ - # Try to get instance by positional argument first. - # Use changes otherwise and warn it'll break. - if args: - try: - (inst,) = args - except ValueError: - msg = f"evolve() takes 1 positional argument, but {len(args)} were given" - raise TypeError(msg) from None - else: - try: - inst = changes.pop("inst") - except KeyError: - msg = "evolve() missing 1 required positional argument: 'inst'" - raise TypeError(msg) from None - - import warnings - - warnings.warn( - "Passing the instance per keyword argument is deprecated and " - "will stop working in, or after, April 2024.", - DeprecationWarning, - stacklevel=2, - ) - - cls = inst.__class__ - attrs = fields(cls) - for a in attrs: - if not a.init: - continue - attr_name = a.name # To deal with private attributes. - init_name = a.alias - if init_name not in changes: - changes[init_name] = getattr(inst, attr_name) - - return cls(**changes) - - -def resolve_types( - cls, globalns=None, localns=None, attribs=None, include_extras=True -): - """ - Resolve any strings and forward annotations in type annotations. - - This is only required if you need concrete types in `Attribute`'s *type* - field. In other words, you don't need to resolve your types if you only - use them for static type checking. - - With no arguments, names will be looked up in the module in which the class - was created. If this is not what you want, e.g. if the name only exists - inside a method, you may pass *globalns* or *localns* to specify other - dictionaries in which to look up these names. See the docs of - `typing.get_type_hints` for more details. - - :param type cls: Class to resolve. - :param Optional[dict] globalns: Dictionary containing global variables. - :param Optional[dict] localns: Dictionary containing local variables. - :param Optional[list] attribs: List of attribs for the given class. - This is necessary when calling from inside a ``field_transformer`` - since *cls* is not an *attrs* class yet. - :param bool include_extras: Resolve more accurately, if possible. - Pass ``include_extras`` to ``typing.get_hints``, if supported by the - typing module. On supported Python versions (3.9+), this resolves the - types more accurately. - - :raise TypeError: If *cls* is not a class. - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class and you didn't pass any attribs. - :raise NameError: If types cannot be resolved because of missing variables. - - :returns: *cls* so you can use this function also as a class decorator. 
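evolve() above copies an instance by re-running __init__ with selected changes. A minimal sketch (the Point class is illustrative):

import attr

@attr.s(frozen=True)
class Point:
    x = attr.ib()
    y = attr.ib()

p = Point(1, 2)
q = attr.evolve(p, y=5)   # new instance built through __init__; p is untouched
assert (q.x, q.y) == (1, 5)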
- Please note that you have to apply it **after** `attrs.define`. That - means the decorator has to come in the line **before** `attrs.define`. - - .. versionadded:: 20.1.0 - .. versionadded:: 21.1.0 *attribs* - .. versionadded:: 23.1.0 *include_extras* - - """ - # Since calling get_type_hints is expensive we cache whether we've - # done it already. - if getattr(cls, "__attrs_types_resolved__", None) != cls: - import typing - - kwargs = {"globalns": globalns, "localns": localns} - - if PY_3_9_PLUS: - kwargs["include_extras"] = include_extras - - hints = typing.get_type_hints(cls, **kwargs) - for field in fields(cls) if attribs is None else attribs: - if field.name in hints: - # Since fields have been frozen we must work around it. - _obj_setattr(field, "type", hints[field.name]) - # We store the class we resolved so that subclasses know they haven't - # been resolved. - cls.__attrs_types_resolved__ = cls - - # Return the class so you can use it as a decorator too. - return cls diff --git a/.venv/Lib/site-packages/attr/_make.py b/.venv/Lib/site-packages/attr/_make.py deleted file mode 100644 index 10b4eca..0000000 --- a/.venv/Lib/site-packages/attr/_make.py +++ /dev/null @@ -1,3119 +0,0 @@ -# SPDX-License-Identifier: MIT - -import contextlib -import copy -import enum -import functools -import inspect -import itertools -import linecache -import sys -import types -import typing - -from operator import itemgetter - -# We need to import _compat itself in addition to the _compat members to avoid -# having the thread-local in the globals here. -from . import _compat, _config, setters -from ._compat import ( - PY310, - PY_3_8_PLUS, - _AnnotationExtractor, - get_generic_base, -) -from .exceptions import ( - DefaultAlreadySetError, - FrozenInstanceError, - NotAnAttrsClassError, - UnannotatedAttributeError, -) - - -# This is used at least twice, so cache it here. -_obj_setattr = object.__setattr__ -_init_converter_pat = "__attr_converter_%s" -_init_factory_pat = "__attr_factory_%s" -_classvar_prefixes = ( - "typing.ClassVar", - "t.ClassVar", - "ClassVar", - "typing_extensions.ClassVar", -) -# we don't use a double-underscore prefix because that triggers -# name mangling when trying to create a slot for the field -# (when slots=True) -_hash_cache_field = "_attrs_cached_hash" - -_empty_metadata_singleton = types.MappingProxyType({}) - -# Unique object for unequivocal getattr() defaults. -_sentinel = object() - -_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) - - -class _Nothing(enum.Enum): - """ - Sentinel to indicate the lack of a value when ``None`` is ambiguous. - - If extending attrs, you can use ``typing.Literal[NOTHING]`` to show - that a value may be ``NOTHING``. - - .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. - .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. - """ - - NOTHING = enum.auto() - - def __repr__(self): - return "NOTHING" - - def __bool__(self): - return False - - -NOTHING = _Nothing.NOTHING -""" -Sentinel to indicate the lack of a value when ``None`` is ambiguous. -""" - - -class _CacheHashWrapper(int): - """ - An integer subclass that pickles / copies as None - - This is used for non-slots classes with ``cache_hash=True``, to avoid - serializing a potentially (even likely) invalid hash value. Since ``None`` - is the default value for uncalculated hashes, whenever this is copied, - the copy's value for the hash should automatically reset. - - See GH #613 for more details. 
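resolve_types() above turns string and forward annotations into concrete types on Attribute.type. A hedged sketch, with an illustrative class:

import attr

@attr.define
class Box:
    content: "int" = 0      # stored as the string "int" until resolved

attr.resolve_types(Box)
assert attr.fields(Box).content.type is int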
- """ - - def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 - return _none_constructor, _args - - -def attrib( - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, - alias=None, -): - """ - Create a new attribute on a class. - - .. warning:: - - Does *not* do anything unless the class is also decorated with `attr.s` - / `attrs.define` / and so on! - - Please consider using `attrs.field` in new code (``attr.ib`` will *never* - go away, though). - - :param default: A value that is used if an *attrs*-generated ``__init__`` - is used and no value is passed while instantiating or the attribute is - excluded using ``init=False``. - - If the value is an instance of `attrs.Factory`, its callable will be - used to construct a new value (useful for mutable data types like lists - or dicts). - - If a default is not set (or set manually to `attrs.NOTHING`), a value - *must* be supplied when instantiating; otherwise a `TypeError` will be - raised. - - The default can also be set using decorator notation as shown below. - - .. seealso:: `defaults` - - :param callable factory: Syntactic sugar for - ``default=attr.Factory(factory)``. - - :param validator: `callable` that is called by *attrs*-generated - ``__init__`` methods after the instance has been initialized. They - receive the initialized instance, the :func:`~attrs.Attribute`, and the - passed value. - - The return value is *not* inspected so the validator has to throw an - exception itself. - - If a `list` is passed, its items are treated as validators and must all - pass. - - Validators can be globally disabled and re-enabled using - `attrs.validators.get_disabled` / `attrs.validators.set_disabled`. - - The validator can also be set using decorator notation as shown below. - - .. seealso:: :ref:`validators` - - :type validator: `callable` or a `list` of `callable`\\ s. - - :param repr: Include this attribute in the generated ``__repr__`` method. - If ``True``, include the attribute; if ``False``, omit it. By default, - the built-in ``repr()`` function is used. To override how the attribute - value is formatted, pass a ``callable`` that takes a single value and - returns a string. Note that the resulting string is used as-is, i.e. it - will be used directly *instead* of calling ``repr()`` (the default). - :type repr: a `bool` or a `callable` to use a custom function. - - :param eq: If ``True`` (default), include this attribute in the generated - ``__eq__`` and ``__ne__`` methods that check two instances for - equality. To override how the attribute value is compared, pass a - ``callable`` that takes a single value and returns the value to be - compared. - - .. seealso:: `comparison` - :type eq: a `bool` or a `callable`. - - :param order: If ``True`` (default), include this attributes in the - generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To - override how the attribute value is ordered, pass a ``callable`` that - takes a single value and returns the value to be ordered. - - .. seealso:: `comparison` - :type order: a `bool` or a `callable`. - - :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the - same value. Must not be mixed with *eq* or *order*. - - .. seealso:: `comparison` - :type cmp: a `bool` or a `callable`. - - :param bool | None hash: Include this attribute in the generated - ``__hash__`` method. 
If ``None`` (default), mirror *eq*'s value. This - is the correct behavior according the Python spec. Setting this value - to anything else than ``None`` is *discouraged*. - - .. seealso:: `hashing` - :param bool init: Include this attribute in the generated ``__init__`` - method. It is possible to set this to ``False`` and set a default - value. In that case this attributed is unconditionally initialized - with the specified default value or factory. - - .. seealso:: `init` - :param callable converter: `callable` that is called by *attrs*-generated - ``__init__`` methods to convert attribute's value to the desired - format. It is given the passed-in value, and the returned value will - be used as the new value of the attribute. The value is converted - before being passed to the validator, if any. - - .. seealso:: :ref:`converters` - :param dict | None metadata: An arbitrary mapping, to be used by - third-party components. See `extending-metadata`. - - :param type: The type of the attribute. Nowadays, the preferred method to - specify the type is using a variable annotation (see :pep:`526`). This - argument is provided for backward compatibility. Regardless of the - approach used, the type will be stored on ``Attribute.type``. - - Please note that *attrs* doesn't do anything with this metadata by - itself. You can use it as part of your own code or for `static type - checking `. - :param bool kw_only: Make this attribute keyword-only in the generated - ``__init__`` (if ``init`` is ``False``, this parameter is ignored). - :param on_setattr: Allows to overwrite the *on_setattr* setting from - `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. - Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this - attribute -- regardless of the setting in `attr.s`. - :type on_setattr: `callable`, or a list of callables, or `None`, or - `attrs.setters.NO_OP` - :param str | None alias: Override this attribute's parameter name in the - generated ``__init__`` method. If left `None`, default to ``name`` - stripped of leading underscores. See `private-attributes`. - - .. versionadded:: 15.2.0 *convert* - .. versionadded:: 16.3.0 *metadata* - .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. - .. versionchanged:: 17.1.0 - *hash* is ``None`` and therefore mirrors *eq* by default. - .. versionadded:: 17.3.0 *type* - .. deprecated:: 17.4.0 *convert* - .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated - *convert* to achieve consistency with other noun-based arguments. - .. versionadded:: 18.1.0 - ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. - .. versionadded:: 18.2.0 *kw_only* - .. versionchanged:: 19.2.0 *convert* keyword argument removed. - .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 - .. versionchanged:: 21.1.0 - *eq*, *order*, and *cmp* also accept a custom callable - .. versionchanged:: 21.1.0 *cmp* undeprecated - .. versionadded:: 22.2.0 *alias* - """ - eq, eq_key, order, order_key = _determine_attrib_eq_order( - cmp, eq, order, True - ) - - if hash is not None and hash is not True and hash is not False: - msg = "Invalid value for hash. Must be True, False, or None." 
- raise TypeError(msg) - - if factory is not None: - if default is not NOTHING: - msg = ( - "The `default` and `factory` arguments are mutually exclusive." - ) - raise ValueError(msg) - if not callable(factory): - msg = "The `factory` argument must be a callable." - raise ValueError(msg) - default = Factory(factory) - - if metadata is None: - metadata = {} - - # Apply syntactic sugar by auto-wrapping. - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - if validator and isinstance(validator, (list, tuple)): - validator = and_(*validator) - - if converter and isinstance(converter, (list, tuple)): - converter = pipe(*converter) - - return _CountingAttr( - default=default, - validator=validator, - repr=repr, - cmp=None, - hash=hash, - init=init, - converter=converter, - metadata=metadata, - type=type, - kw_only=kw_only, - eq=eq, - eq_key=eq_key, - order=order, - order_key=order_key, - on_setattr=on_setattr, - alias=alias, - ) - - -def _compile_and_eval(script, globs, locs=None, filename=""): - """ - "Exec" the script with the given global (globs) and local (locs) variables. - """ - bytecode = compile(script, filename, "exec") - eval(bytecode, globs, locs) - - -def _make_method(name, script, filename, globs): - """ - Create the method with the script given and return the method object. - """ - locs = {} - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - count = 1 - base_filename = filename - while True: - linecache_tuple = ( - len(script), - None, - script.splitlines(True), - filename, - ) - old_val = linecache.cache.setdefault(filename, linecache_tuple) - if old_val == linecache_tuple: - break - - filename = f"{base_filename[:-1]}-{count}>" - count += 1 - - _compile_and_eval(script, globs, locs, filename) - - return locs[name] - - -def _make_attr_tuple_class(cls_name, attr_names): - """ - Create a tuple subclass to hold `Attribute`s for an `attrs` class. - - The subclass is a bare tuple with properties for names. - - class MyClassAttributes(tuple): - __slots__ = () - x = property(itemgetter(0)) - """ - attr_class_name = f"{cls_name}Attributes" - attr_class_template = [ - f"class {attr_class_name}(tuple):", - " __slots__ = ()", - ] - if attr_names: - for i, attr_name in enumerate(attr_names): - attr_class_template.append( - f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" - ) - else: - attr_class_template.append(" pass") - globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} - _compile_and_eval("\n".join(attr_class_template), globs) - return globs[attr_class_name] - - -# Tuple class for extracted attributes from a class definition. -# `base_attrs` is a subset of `attrs`. -_Attributes = _make_attr_tuple_class( - "_Attributes", - [ - # all attributes to build dunder methods for - "attrs", - # attributes that have been inherited - "base_attrs", - # map inherited attributes to their originating classes - "base_attrs_map", - ], -) - - -def _is_class_var(annot): - """ - Check whether *annot* is a typing.ClassVar. - - The string comparison hack is used to avoid evaluating all string - annotations which would put attrs-based classes at a performance - disadvantage compared to plain old classes. - """ - annot = str(annot) - - # Annotation can be quoted. 
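The attrib() (attr.ib) docstring above covers defaults, factories, and validators; a short illustrative sketch (the Job class is hypothetical):

import attr

@attr.s
class Job:
    name = attr.ib()
    retries = attr.ib(default=3, validator=attr.validators.instance_of(int))
    tags = attr.ib(factory=list)   # a fresh list per instance

j = Job("sync")
assert j.retries == 3 and j.tags == []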
- if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): - annot = annot[1:-1] - - return annot.startswith(_classvar_prefixes) - - -def _has_own_attribute(cls, attrib_name): - """ - Check whether *cls* defines *attrib_name* (and doesn't just inherit it). - """ - attr = getattr(cls, attrib_name, _sentinel) - if attr is _sentinel: - return False - - for base_cls in cls.__mro__[1:]: - a = getattr(base_cls, attrib_name, None) - if attr is a: - return False - - return True - - -def _get_annotations(cls): - """ - Get annotations for *cls*. - """ - if _has_own_attribute(cls, "__annotations__"): - return cls.__annotations__ - - return {} - - -def _collect_base_attrs(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in reversed(cls.__mro__[1:-1]): - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.inherited or a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) # noqa: PLW2901 - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - # For each name, only keep the freshest definition i.e. the furthest at the - # back. base_attr_map is fine because it gets overwritten with every new - # instance. - filtered = [] - seen = set() - for a in reversed(base_attrs): - if a.name in seen: - continue - filtered.insert(0, a) - seen.add(a.name) - - return filtered, base_attr_map - - -def _collect_base_attrs_broken(cls, taken_attr_names): - """ - Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. - - N.B. *taken_attr_names* will be mutated. - - Adhere to the old incorrect behavior. - - Notably it collects from the front and considers inherited attributes which - leads to the buggy behavior reported in #428. - """ - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - - # Traverse the MRO and collect attributes. - for base_cls in cls.__mro__[1:-1]: - for a in getattr(base_cls, "__attrs_attrs__", []): - if a.name in taken_attr_names: - continue - - a = a.evolve(inherited=True) # noqa: PLW2901 - taken_attr_names.add(a.name) - base_attrs.append(a) - base_attr_map[a.name] = base_cls - - return base_attrs, base_attr_map - - -def _transform_attrs( - cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer -): - """ - Transform all `_CountingAttr`s on a class into `Attribute`s. - - If *these* is passed, use that and don't look for them on the class. - - *collect_by_mro* is True, collect them in the correct MRO order, otherwise - use the old -- incorrect -- order. See #428. - - Return an `_Attributes`. - """ - cd = cls.__dict__ - anns = _get_annotations(cls) - - if these is not None: - ca_list = list(these.items()) - elif auto_attribs is True: - ca_names = { - name - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - } - ca_list = [] - annot_names = set() - for attr_name, type in anns.items(): - if _is_class_var(type): - continue - annot_names.add(attr_name) - a = cd.get(attr_name, NOTHING) - - if not isinstance(a, _CountingAttr): - a = attrib() if a is NOTHING else attrib(default=a) - ca_list.append((attr_name, a)) - - unannotated = ca_names - annot_names - if len(unannotated) > 0: - raise UnannotatedAttributeError( - "The following `attr.ib`s lack a type annotation: " - + ", ".join( - sorted(unannotated, key=lambda n: cd.get(n).counter) - ) - + "." 
- ) - else: - ca_list = sorted( - ( - (name, attr) - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - ), - key=lambda e: e[1].counter, - ) - - own_attrs = [ - Attribute.from_counting_attr( - name=attr_name, ca=ca, type=anns.get(attr_name) - ) - for attr_name, ca in ca_list - ] - - if collect_by_mro: - base_attrs, base_attr_map = _collect_base_attrs( - cls, {a.name for a in own_attrs} - ) - else: - base_attrs, base_attr_map = _collect_base_attrs_broken( - cls, {a.name for a in own_attrs} - ) - - if kw_only: - own_attrs = [a.evolve(kw_only=True) for a in own_attrs] - base_attrs = [a.evolve(kw_only=True) for a in base_attrs] - - attrs = base_attrs + own_attrs - - # Mandatory vs non-mandatory attr order only matters when they are part of - # the __init__ signature and when they aren't kw_only (which are moved to - # the end and can be mandatory or non-mandatory in any order, as they will - # be specified as keyword args anyway). Check the order of those attrs: - had_default = False - for a in (a for a in attrs if a.init is not False and a.kw_only is False): - if had_default is True and a.default is NOTHING: - msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" - raise ValueError(msg) - - if had_default is False and a.default is not NOTHING: - had_default = True - - if field_transformer is not None: - attrs = field_transformer(cls, attrs) - - # Resolve default field alias after executing field_transformer. - # This allows field_transformer to differentiate between explicit vs - # default aliases and supply their own defaults. - attrs = [ - a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a - for a in attrs - ] - - # Create AttrsClass *after* applying the field_transformer since it may - # add or remove attributes! - attr_names = [a.name for a in attrs] - AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - - return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) - - -def _make_cached_property_getattr( - cached_properties, - original_getattr, - cls, -): - lines = [ - # Wrapped to get `__class__` into closure cell for super() - # (It will be replaced with the newly constructed class after construction). - "def wrapper():", - " __class__ = _cls", - " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", - " func = cached_properties.get(item)", - " if func is not None:", - " result = func(self)", - " _setter = _cached_setattr_get(self)", - " _setter(item, result)", - " return result", - ] - if original_getattr is not None: - lines.append( - " return original_getattr(self, item)", - ) - else: - lines.extend( - [ - " if hasattr(super(), '__getattr__'):", - " return super().__getattr__(item)", - " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", - " raise AttributeError(original_error)", - ] - ) - - lines.extend( - [ - " return __getattr__", - "__getattr__ = wrapper()", - ] - ) - - unique_filename = _generate_unique_filename(cls, "getattr") - - glob = { - "cached_properties": cached_properties, - "_cached_setattr_get": _obj_setattr.__get__, - "_cls": cls, - "original_getattr": original_getattr, - } - - return _make_method( - "__getattr__", - "\n".join(lines), - unique_filename, - glob, - ) - - -def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. 
- """ - if isinstance(self, BaseException) and name in ( - "__cause__", - "__context__", - "__traceback__", - ): - BaseException.__setattr__(self, name, value) - return - - raise FrozenInstanceError() - - -def _frozen_delattrs(self, name): - """ - Attached to frozen classes as __delattr__. - """ - raise FrozenInstanceError() - - -class _ClassBuilder: - """ - Iteratively build *one* class. - """ - - __slots__ = ( - "_attr_names", - "_attrs", - "_base_attr_map", - "_base_names", - "_cache_hash", - "_cls", - "_cls_dict", - "_delete_attribs", - "_frozen", - "_has_pre_init", - "_pre_init_has_args", - "_has_post_init", - "_is_exc", - "_on_setattr", - "_slots", - "_weakref_slot", - "_wrote_own_setattr", - "_has_custom_setattr", - ) - - def __init__( - self, - cls, - these, - slots, - frozen, - weakref_slot, - getstate_setstate, - auto_attribs, - kw_only, - cache_hash, - is_exc, - collect_by_mro, - on_setattr, - has_custom_setattr, - field_transformer, - ): - attrs, base_attrs, base_map = _transform_attrs( - cls, - these, - auto_attribs, - kw_only, - collect_by_mro, - field_transformer, - ) - - self._cls = cls - self._cls_dict = dict(cls.__dict__) if slots else {} - self._attrs = attrs - self._base_names = {a.name for a in base_attrs} - self._base_attr_map = base_map - self._attr_names = tuple(a.name for a in attrs) - self._slots = slots - self._frozen = frozen - self._weakref_slot = weakref_slot - self._cache_hash = cache_hash - self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) - self._pre_init_has_args = False - if self._has_pre_init: - # Check if the pre init method has more arguments than just `self` - # We want to pass arguments if pre init expects arguments - pre_init_func = cls.__attrs_pre_init__ - pre_init_signature = inspect.signature(pre_init_func) - self._pre_init_has_args = len(pre_init_signature.parameters) > 1 - self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) - self._delete_attribs = not bool(these) - self._is_exc = is_exc - self._on_setattr = on_setattr - - self._has_custom_setattr = has_custom_setattr - self._wrote_own_setattr = False - - self._cls_dict["__attrs_attrs__"] = self._attrs - - if frozen: - self._cls_dict["__setattr__"] = _frozen_setattrs - self._cls_dict["__delattr__"] = _frozen_delattrs - - self._wrote_own_setattr = True - elif on_setattr in ( - _ng_default_on_setattr, - setters.validate, - setters.convert, - ): - has_validator = has_converter = False - for a in attrs: - if a.validator is not None: - has_validator = True - if a.converter is not None: - has_converter = True - - if has_validator and has_converter: - break - if ( - ( - on_setattr == _ng_default_on_setattr - and not (has_validator or has_converter) - ) - or (on_setattr == setters.validate and not has_validator) - or (on_setattr == setters.convert and not has_converter) - ): - # If class-level on_setattr is set to convert + validate, but - # there's no field to convert or validate, pretend like there's - # no on_setattr. - self._on_setattr = None - - if getstate_setstate: - ( - self._cls_dict["__getstate__"], - self._cls_dict["__setstate__"], - ) = self._make_getstate_setstate() - - def __repr__(self): - return f"<_ClassBuilder(cls={self._cls.__name__})>" - - if PY310: - import abc - - def build_class(self): - """ - Finalize class based on the accumulated configuration. - - Builder cannot be used after calling this method. 
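_frozen_setattrs above is what makes frozen=True classes reject assignment after __init__. A minimal sketch (class name illustrative):

import attr

@attr.s(frozen=True)
class Config:
    url = attr.ib()

c = Config("https://example.org")
try:
    c.url = "other"
except attr.exceptions.FrozenInstanceError:
    pass   # expected: frozen instances raise on attribute assignment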
- """ - if self._slots is True: - return self._create_slots_class() - - return self.abc.update_abstractmethods( - self._patch_original_class() - ) - - else: - - def build_class(self): - """ - Finalize class based on the accumulated configuration. - - Builder cannot be used after calling this method. - """ - if self._slots is True: - return self._create_slots_class() - - return self._patch_original_class() - - def _patch_original_class(self): - """ - Apply accumulated methods and return the class. - """ - cls = self._cls - base_names = self._base_names - - # Clean class of attribute definitions (`attr.ib()`s). - if self._delete_attribs: - for name in self._attr_names: - if ( - name not in base_names - and getattr(cls, name, _sentinel) is not _sentinel - ): - # An AttributeError can happen if a base class defines a - # class variable and we want to set an attribute with the - # same name by using only a type annotation. - with contextlib.suppress(AttributeError): - delattr(cls, name) - - # Attach our dunder methods. - for name, value in self._cls_dict.items(): - setattr(cls, name, value) - - # If we've inherited an attrs __setattr__ and don't write our own, - # reset it to object's. - if not self._wrote_own_setattr and getattr( - cls, "__attrs_own_setattr__", False - ): - cls.__attrs_own_setattr__ = False - - if not self._has_custom_setattr: - cls.__setattr__ = _obj_setattr - - return cls - - def _create_slots_class(self): - """ - Build and return a new class with a `__slots__` attribute. - """ - cd = { - k: v - for k, v in self._cls_dict.items() - if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") - } - - # If our class doesn't have its own implementation of __setattr__ - # (either from the user or by us), check the bases, if one of them has - # an attrs-made __setattr__, that needs to be reset. We don't walk the - # MRO because we only care about our immediate base classes. - # XXX: This can be confused by subclassing a slotted attrs class with - # XXX: a non-attrs class and subclass the resulting class with an attrs - # XXX: class. See `test_slotted_confused` for details. For now that's - # XXX: OK with us. - if not self._wrote_own_setattr: - cd["__attrs_own_setattr__"] = False - - if not self._has_custom_setattr: - for base_cls in self._cls.__bases__: - if base_cls.__dict__.get("__attrs_own_setattr__", False): - cd["__setattr__"] = _obj_setattr - break - - # Traverse the MRO to collect existing slots - # and check for an existing __weakref__. - existing_slots = {} - weakref_inherited = False - for base_cls in self._cls.__mro__[1:-1]: - if base_cls.__dict__.get("__weakref__", None) is not None: - weakref_inherited = True - existing_slots.update( - { - name: getattr(base_cls, name) - for name in getattr(base_cls, "__slots__", []) - } - ) - - base_names = set(self._base_names) - - names = self._attr_names - if ( - self._weakref_slot - and "__weakref__" not in getattr(self._cls, "__slots__", ()) - and "__weakref__" not in names - and not weakref_inherited - ): - names += ("__weakref__",) - - if PY_3_8_PLUS: - cached_properties = { - name: cached_property.func - for name, cached_property in cd.items() - if isinstance(cached_property, functools.cached_property) - } - else: - # `functools.cached_property` was introduced in 3.8. - # So can't be used before this. - cached_properties = {} - - # Collect methods with a `__class__` reference that are shadowed in the new class. - # To know to update them. 
- additional_closure_functions_to_update = [] - if cached_properties: - # Add cached properties to names for slotting. - names += tuple(cached_properties.keys()) - - for name in cached_properties: - # Clear out function from class to avoid clashing. - del cd[name] - - class_annotations = _get_annotations(self._cls) - for name, func in cached_properties.items(): - annotation = inspect.signature(func).return_annotation - if annotation is not inspect.Parameter.empty: - class_annotations[name] = annotation - - original_getattr = cd.get("__getattr__") - if original_getattr is not None: - additional_closure_functions_to_update.append(original_getattr) - - cd["__getattr__"] = _make_cached_property_getattr( - cached_properties, original_getattr, self._cls - ) - - # We only add the names of attributes that aren't inherited. - # Setting __slots__ to inherited attributes wastes memory. - slot_names = [name for name in names if name not in base_names] - - # There are slots for attributes from current class - # that are defined in parent classes. - # As their descriptors may be overridden by a child class, - # we collect them here and update the class dict - reused_slots = { - slot: slot_descriptor - for slot, slot_descriptor in existing_slots.items() - if slot in slot_names - } - slot_names = [name for name in slot_names if name not in reused_slots] - cd.update(reused_slots) - if self._cache_hash: - slot_names.append(_hash_cache_field) - - cd["__slots__"] = tuple(slot_names) - - cd["__qualname__"] = self._cls.__qualname__ - - # Create new class based on old class and our methods. - cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) - - # The following is a fix for - # . - # If a method mentions `__class__` or uses the no-arg super(), the - # compiler will bake a reference to the class in the method itself - # as `method.__closure__`. Since we replace the class with a - # clone, we rewrite these references so it keeps working. - for item in itertools.chain( - cls.__dict__.values(), additional_closure_functions_to_update - ): - if isinstance(item, (classmethod, staticmethod)): - # Class- and staticmethods hide their functions inside. - # These might need to be rewritten as well. - closure_cells = getattr(item.__func__, "__closure__", None) - elif isinstance(item, property): - # Workaround for property `super()` shortcut (PY3-only). - # There is no universal way for other descriptors. - closure_cells = getattr(item.fget, "__closure__", None) - else: - closure_cells = getattr(item, "__closure__", None) - - if not closure_cells: # Catch None or the empty list. - continue - for cell in closure_cells: - try: - match = cell.cell_contents is self._cls - except ValueError: # noqa: PERF203 - # ValueError: Cell is empty - pass - else: - if match: - cell.cell_contents = cls - return cls - - def add_repr(self, ns): - self._cls_dict["__repr__"] = self._add_method_dunders( - _make_repr(self._attrs, ns, self._cls) - ) - return self - - def add_str(self): - repr = self._cls_dict.get("__repr__") - if repr is None: - msg = "__str__ can only be generated if a __repr__ exists." - raise ValueError(msg) - - def __str__(self): - return self.__repr__() - - self._cls_dict["__str__"] = self._add_method_dunders(__str__) - return self - - def _make_getstate_setstate(self): - """ - Create custom __setstate__ and __getstate__ methods. - """ - # __weakref__ is not writable. 
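_create_slots_class above implements the slots=True path: the rebuilt class gets __slots__ and its instances carry no per-instance __dict__. A small sketch with an illustrative class:

import attr

@attr.s(slots=True)
class Pixel:
    x = attr.ib()
    y = attr.ib()

p = Pixel(1, 2)
assert not hasattr(p, "__dict__")   # attributes live in slots only
try:
    p.z = 3
except AttributeError:
    pass   # unknown attributes cannot be added to a slotted instance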
- state_attr_names = tuple( - an for an in self._attr_names if an != "__weakref__" - ) - - def slots_getstate(self): - """ - Automatically created by attrs. - """ - return {name: getattr(self, name) for name in state_attr_names} - - hash_caching_enabled = self._cache_hash - - def slots_setstate(self, state): - """ - Automatically created by attrs. - """ - __bound_setattr = _obj_setattr.__get__(self) - if isinstance(state, tuple): - # Backward compatibility with attrs instances pickled with - # attrs versions before v22.2.0 which stored tuples. - for name, value in zip(state_attr_names, state): - __bound_setattr(name, value) - else: - for name in state_attr_names: - if name in state: - __bound_setattr(name, state[name]) - - # The hash code cache is not included when the object is - # serialized, but it still needs to be initialized to None to - # indicate that the first call to __hash__ should be a cache - # miss. - if hash_caching_enabled: - __bound_setattr(_hash_cache_field, None) - - return slots_getstate, slots_setstate - - def make_unhashable(self): - self._cls_dict["__hash__"] = None - return self - - def add_hash(self): - self._cls_dict["__hash__"] = self._add_method_dunders( - _make_hash( - self._cls, - self._attrs, - frozen=self._frozen, - cache_hash=self._cache_hash, - ) - ) - - return self - - def add_init(self): - self._cls_dict["__init__"] = self._add_method_dunders( - _make_init( - self._cls, - self._attrs, - self._has_pre_init, - self._pre_init_has_args, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr, - attrs_init=False, - ) - ) - - return self - - def add_match_args(self): - self._cls_dict["__match_args__"] = tuple( - field.name - for field in self._attrs - if field.init and not field.kw_only - ) - - def add_attrs_init(self): - self._cls_dict["__attrs_init__"] = self._add_method_dunders( - _make_init( - self._cls, - self._attrs, - self._has_pre_init, - self._pre_init_has_args, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - self._on_setattr, - attrs_init=True, - ) - ) - - return self - - def add_eq(self): - cd = self._cls_dict - - cd["__eq__"] = self._add_method_dunders( - _make_eq(self._cls, self._attrs) - ) - cd["__ne__"] = self._add_method_dunders(_make_ne()) - - return self - - def add_order(self): - cd = self._cls_dict - - cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( - self._add_method_dunders(meth) - for meth in _make_order(self._cls, self._attrs) - ) - - return self - - def add_setattr(self): - if self._frozen: - return self - - sa_attrs = {} - for a in self._attrs: - on_setattr = a.on_setattr or self._on_setattr - if on_setattr and on_setattr is not setters.NO_OP: - sa_attrs[a.name] = a, on_setattr - - if not sa_attrs: - return self - - if self._has_custom_setattr: - # We need to write a __setattr__ but there already is one! - msg = "Can't combine custom __setattr__ with on_setattr hooks." 
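The generated __getstate__/__setstate__ above exist so slotted attrs classes can be pickled. A quick sketch (class name illustrative):

import pickle

import attr

@attr.s(slots=True)
class Token:
    value = attr.ib()

t = Token("abc")
assert pickle.loads(pickle.dumps(t)) == t   # round-trips via the generated state methods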
- raise ValueError(msg) - - # docstring comes from _add_method_dunders - def __setattr__(self, name, val): - try: - a, hook = sa_attrs[name] - except KeyError: - nval = val - else: - nval = hook(self, a, val) - - _obj_setattr(self, name, nval) - - self._cls_dict["__attrs_own_setattr__"] = True - self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) - self._wrote_own_setattr = True - - return self - - def _add_method_dunders(self, method): - """ - Add __module__ and __qualname__ to a *method* if possible. - """ - with contextlib.suppress(AttributeError): - method.__module__ = self._cls.__module__ - - with contextlib.suppress(AttributeError): - method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" - - with contextlib.suppress(AttributeError): - method.__doc__ = ( - "Method generated by attrs for class " - f"{self._cls.__qualname__}." - ) - - return method - - -def _determine_attrs_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - msg = "Don't mix `cmp` with `eq' and `order`." - raise ValueError(msg) - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - return cmp, cmp - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq = default_eq - - if order is None: - order = eq - - if eq is False and order is True: - msg = "`order` can only be True if `eq` is True too." - raise ValueError(msg) - - return eq, order - - -def _determine_attrib_eq_order(cmp, eq, order, default_eq): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. If *eq* is None, set it to *default_eq*. - """ - if cmp is not None and any((eq is not None, order is not None)): - msg = "Don't mix `cmp` with `eq' and `order`." - raise ValueError(msg) - - def decide_callable_or_boolean(value): - """ - Decide whether a key function is used. - """ - if callable(value): - value, key = True, value - else: - key = None - return value, key - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - cmp, cmp_key = decide_callable_or_boolean(cmp) - return cmp, cmp_key, cmp, cmp_key - - # If left None, equality is set to the specified default and ordering - # mirrors equality. - if eq is None: - eq, eq_key = default_eq, None - else: - eq, eq_key = decide_callable_or_boolean(eq) - - if order is None: - order, order_key = eq, eq_key - else: - order, order_key = decide_callable_or_boolean(order) - - if eq is False and order is True: - msg = "`order` can only be True if `eq` is True too." - raise ValueError(msg) - - return eq, eq_key, order, order_key - - -def _determine_whether_to_implement( - cls, flag, auto_detect, dunders, default=True -): - """ - Check whether we should implement a set of methods for *cls*. - - *flag* is the argument passed into @attr.s like 'init', *auto_detect* the - same as passed into @attr.s and *dunders* is a tuple of attribute names - whose presence signal that the user has implemented it themselves. - - Return *default* if no reason for either for or against is found. - """ - if flag is True or flag is False: - return flag - - if flag is None and auto_detect is False: - return default - - # Logically, flag is None and auto_detect is True here. 
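add_setattr above wires per-field on_setattr hooks into a generated __setattr__. A hedged sketch of the effect, using an illustrative class:

import attr

@attr.s
class Port:
    number = attr.ib(converter=int, on_setattr=attr.setters.convert)

p = Port("80")
p.number = "8080"        # the converter also runs on plain assignment
assert p.number == 8080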
- for dunder in dunders: - if _has_own_attribute(cls, dunder): - return False - - return default - - -def attrs( - maybe_cls=None, - these=None, - repr_ns=None, - repr=None, - cmp=None, - hash=None, - init=None, - slots=False, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=False, - kw_only=False, - cache_hash=False, - auto_exc=False, - eq=None, - order=None, - auto_detect=False, - collect_by_mro=False, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, - match_args=True, - unsafe_hash=None, -): - r""" - A class decorator that adds :term:`dunder methods` according to the - specified attributes using `attr.ib` or the *these* argument. - - Please consider using `attrs.define` / `attrs.frozen` in new code - (``attr.s`` will *never* go away, though). - - :param these: A dictionary of name to `attr.ib` mappings. This is useful - to avoid the definition of your attributes within the class body - because you can't (e.g. if you want to add ``__repr__`` methods to - Django models) or don't want to. - - If *these* is not ``None``, *attrs* will *not* search the class body - for attributes and will *not* remove any attributes from it. - - The order is deduced from the order of the attributes inside *these*. - - :type these: `dict` of `str` to `attr.ib` - - :param str repr_ns: When using nested classes, there's no way in Python 2 - to automatically detect that. Therefore it's possible to set the - namespace explicitly for a more meaningful ``repr`` output. - :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, - *order*, and *hash* arguments explicitly, assume they are set to - ``True`` **unless any** of the involved methods for one of the - arguments is implemented in the *current* class (i.e. it is *not* - inherited from some base class). - - So for example by implementing ``__eq__`` on a class yourself, *attrs* - will deduce ``eq=False`` and will create *neither* ``__eq__`` *nor* - ``__ne__`` (but Python classes come with a sensible ``__ne__`` by - default, so it *should* be enough to only implement ``__eq__`` in most - cases). - - .. warning:: - - If you prevent *attrs* from creating the ordering methods for you - (``order=False``, e.g. by implementing ``__le__``), it becomes - *your* responsibility to make sure its ordering is sound. The best - way is to use the `functools.total_ordering` decorator. - - - Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, *cmp*, - or *hash* overrides whatever *auto_detect* would determine. - - :param bool repr: Create a ``__repr__`` method with a human readable - representation of *attrs* attributes.. - :param bool str: Create a ``__str__`` method that is identical to - ``__repr__``. This is usually not necessary except for `Exception`\ s. - :param bool | None eq: If ``True`` or ``None`` (default), add ``__eq__`` - and ``__ne__`` methods that check two instances for equality. - - They compare the instances as if they were tuples of their *attrs* - attributes if and only if the types of both classes are *identical*! - - .. seealso:: `comparison` - :param bool | None order: If ``True``, add ``__lt__``, ``__le__``, - ``__gt__``, and ``__ge__`` methods that behave like *eq* above and - allow instances to be ordered. If ``None`` (default) mirror value of - *eq*. - - .. seealso:: `comparison` - :param bool | None cmp: Setting *cmp* is equivalent to setting *eq* and - *order* to the same value. Must not be mixed with *eq* or *order*. - - .. 
seealso:: `comparison` - :param bool | None unsafe_hash: If ``None`` (default), the ``__hash__`` - method is generated according how *eq* and *frozen* are set. - - 1. If *both* are True, *attrs* will generate a ``__hash__`` for you. - 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to - None, marking it unhashable (which it is). - 3. If *eq* is False, ``__hash__`` will be left untouched meaning the - ``__hash__`` method of the base class will be used (if base class is - ``object``, this means it will fall back to id-based hashing.). - - Although not recommended, you can decide for yourself and force *attrs* - to create one (e.g. if the class is immutable even though you didn't - freeze it programmatically) by passing ``True`` or not. Both of these - cases are rather special and should be used carefully. - - .. seealso:: - - - Our documentation on `hashing`, - - Python's documentation on `object.__hash__`, - - and the `GitHub issue that led to the default \ - behavior `_ for - more details. - - :param bool | None hash: Alias for *unsafe_hash*. *unsafe_hash* takes - precedence. - :param bool init: Create a ``__init__`` method that initializes the *attrs* - attributes. Leading underscores are stripped for the argument name. If - a ``__attrs_pre_init__`` method exists on the class, it will be called - before the class is initialized. If a ``__attrs_post_init__`` method - exists on the class, it will be called after the class is fully - initialized. - - If ``init`` is ``False``, an ``__attrs_init__`` method will be injected - instead. This allows you to define a custom ``__init__`` method that - can do pre-init work such as ``super().__init__()``, and then call - ``__attrs_init__()`` and ``__attrs_post_init__()``. - - .. seealso:: `init` - :param bool slots: Create a :term:`slotted class ` that's - more memory-efficient. Slotted classes are generally superior to the - default dict classes, but have some gotchas you should know about, so - we encourage you to read the :term:`glossary entry `. - :param bool frozen: Make instances immutable after initialization. If - someone attempts to modify a frozen instance, - `attrs.exceptions.FrozenInstanceError` is raised. - - .. note:: - - 1. This is achieved by installing a custom ``__setattr__`` method - on your class, so you can't implement your own. - - 2. True immutability is impossible in Python. - - 3. This *does* have a minor a runtime performance `impact - ` when initializing new instances. In other words: - ``__init__`` is slightly slower with ``frozen=True``. - - 4. If a class is frozen, you cannot modify ``self`` in - ``__attrs_post_init__`` or a self-written ``__init__``. You can - circumvent that limitation by using ``object.__setattr__(self, - "attribute_name", value)``. - - 5. Subclasses of a frozen class are frozen too. - - :param bool weakref_slot: Make instances weak-referenceable. This has no - effect unless ``slots`` is also enabled. - :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated - attributes from the class body. - - In this case, you **must** annotate every field. If *attrs* encounters - a field that is set to an `attr.ib` but lacks a type annotation, an - `attr.exceptions.UnannotatedAttributeError` is raised. Use - ``field_name: typing.Any = attr.ib(...)`` if you don't want to set a - type. - - If you assign a value to those attributes (e.g. ``x: int = 42``), that - value becomes the default value like if it were passed using - ``attr.ib(default=42)``. 
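Illustrative aside (not from the deleted file): with auto_attribs=True an annotated assignment becomes the field's default, exactly as described above. Class name invented for the sketch:

import attr

@attr.s(auto_attribs=True)
class Point:
    x: int
    y: int = 42   # behaves like y: int = attr.ib(default=42)

p = Point(1)
assert (p.x, p.y) == (1, 42)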
Passing an instance of `attrs.Factory` also - works as expected in most cases (see warning below). - - Attributes annotated as `typing.ClassVar`, and attributes that are - neither annotated nor set to an `attr.ib` are **ignored**. - - .. warning:: - For features that use the attribute name to create decorators (e.g. - :ref:`validators `), you still *must* assign `attr.ib` - to them. Otherwise Python will either not find the name or try to - use the default value to call e.g. ``validator`` on it. - - These errors can be quite confusing and probably the most common bug - report on our bug tracker. - - :param bool kw_only: Make all attributes keyword-only in the generated - ``__init__`` (if ``init`` is ``False``, this parameter is ignored). - :param bool cache_hash: Ensure that the object's hash code is computed only - once and stored on the object. If this is set to ``True``, hashing - must be either explicitly or implicitly enabled for this class. If the - hash code is cached, avoid any reassignments of fields involved in hash - code computation or mutations of the objects those fields point to - after object creation. If such changes occur, the behavior of the - object's hash code is undefined. - :param bool auto_exc: If the class subclasses `BaseException` (which - implicitly includes any subclass of any exception), the following - happens to behave like a well-behaved Python exceptions class: - - - the values for *eq*, *order*, and *hash* are ignored and the - instances compare and hash by the instance's ids (N.B. *attrs* will - *not* remove existing implementations of ``__hash__`` or the equality - methods. It just won't add own ones.), - - all attributes that are either passed into ``__init__`` or have a - default value are additionally available as a tuple in the ``args`` - attribute, - - the value of *str* is ignored leaving ``__str__`` to base classes. - :param bool collect_by_mro: Setting this to `True` fixes the way *attrs* - collects attributes from base classes. The default behavior is - incorrect in certain cases of multiple inheritance. It should be on by - default but is kept off for backward-compatibility. - - .. seealso:: - Issue `#428 `_ - - :param bool | None getstate_setstate: - .. note:: - This is usually only interesting for slotted classes and you should - probably just set *auto_detect* to `True`. - - If `True`, ``__getstate__`` and ``__setstate__`` are generated and - attached to the class. This is necessary for slotted classes to be - pickleable. If left `None`, it's `True` by default for slotted classes - and ``False`` for dict classes. - - If *auto_detect* is `True`, and *getstate_setstate* is left `None`, and - **either** ``__getstate__`` or ``__setstate__`` is detected directly on - the class (i.e. not inherited), it is set to `False` (this is usually - what you want). - - :param on_setattr: A callable that is run whenever the user attempts to set - an attribute (either by assignment like ``i.x = 42`` or by using - `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments - as validators: the instance, the attribute that is being modified, and - the new value. - - If no exception is raised, the attribute is set to the return value of - the callable. - - If a list of callables is passed, they're automatically wrapped in an - `attrs.setters.pipe`. 
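Illustrative aside (a sketch using the public attr.setters and attr.validators helpers, not text from the deleted file): passing a list to on_setattr wires conversion and validation into later assignments as well:

import attr

@attr.s(on_setattr=[attr.setters.convert, attr.setters.validate])
class Server:
    port = attr.ib(converter=int, validator=attr.validators.instance_of(int))

s = Server("8080")       # the converter always runs in __init__
s.port = "9090"          # the list is wrapped in attr.setters.pipe, so the
assert s.port == 9090    # converter and validator also run on assignment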
- :type on_setattr: `callable`, or a list of callables, or `None`, or - `attrs.setters.NO_OP` - - :param callable | None field_transformer: - A function that is called with the original class object and all fields - right before *attrs* finalizes the class. You can use this, e.g., to - automatically add converters or validators to fields based on their - types. - - .. seealso:: `transform-fields` - - :param bool match_args: - If `True` (default), set ``__match_args__`` on the class to support - :pep:`634` (Structural Pattern Matching). It is a tuple of all - non-keyword-only ``__init__`` parameter names on Python 3.10 and later. - Ignored on older Python versions. - - .. versionadded:: 16.0.0 *slots* - .. versionadded:: 16.1.0 *frozen* - .. versionadded:: 16.3.0 *str* - .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. - .. versionchanged:: 17.1.0 - *hash* supports ``None`` as value which is also the default now. - .. versionadded:: 17.3.0 *auto_attribs* - .. versionchanged:: 18.1.0 - If *these* is passed, no attributes are deleted from the class body. - .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. - .. versionadded:: 18.2.0 *weakref_slot* - .. deprecated:: 18.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a - `DeprecationWarning` if the classes compared are subclasses of - each other. ``__eq`` and ``__ne__`` never tried to compared subclasses - to each other. - .. versionchanged:: 19.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider - subclasses comparable anymore. - .. versionadded:: 18.2.0 *kw_only* - .. versionadded:: 18.2.0 *cache_hash* - .. versionadded:: 19.1.0 *auto_exc* - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - .. versionadded:: 20.1.0 *auto_detect* - .. versionadded:: 20.1.0 *collect_by_mro* - .. versionadded:: 20.1.0 *getstate_setstate* - .. versionadded:: 20.1.0 *on_setattr* - .. versionadded:: 20.3.0 *field_transformer* - .. versionchanged:: 21.1.0 - ``init=False`` injects ``__attrs_init__`` - .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` - .. versionchanged:: 21.1.0 *cmp* undeprecated - .. versionadded:: 21.3.0 *match_args* - .. versionadded:: 22.2.0 - *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). - """ - eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) - - # unsafe_hash takes precedence due to PEP 681. - if unsafe_hash is not None: - hash = unsafe_hash - - if isinstance(on_setattr, (list, tuple)): - on_setattr = setters.pipe(*on_setattr) - - def wrap(cls): - is_frozen = frozen or _has_frozen_base_class(cls) - is_exc = auto_exc is True and issubclass(cls, BaseException) - has_own_setattr = auto_detect and _has_own_attribute( - cls, "__setattr__" - ) - - if has_own_setattr and is_frozen: - msg = "Can't freeze a class with a custom __setattr__." 
- raise ValueError(msg) - - builder = _ClassBuilder( - cls, - these, - slots, - is_frozen, - weakref_slot, - _determine_whether_to_implement( - cls, - getstate_setstate, - auto_detect, - ("__getstate__", "__setstate__"), - default=slots, - ), - auto_attribs, - kw_only, - cache_hash, - is_exc, - collect_by_mro, - on_setattr, - has_own_setattr, - field_transformer, - ) - if _determine_whether_to_implement( - cls, repr, auto_detect, ("__repr__",) - ): - builder.add_repr(repr_ns) - if str is True: - builder.add_str() - - eq = _determine_whether_to_implement( - cls, eq_, auto_detect, ("__eq__", "__ne__") - ) - if not is_exc and eq is True: - builder.add_eq() - if not is_exc and _determine_whether_to_implement( - cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") - ): - builder.add_order() - - builder.add_setattr() - - nonlocal hash - if ( - hash is None - and auto_detect is True - and _has_own_attribute(cls, "__hash__") - ): - hash = False - - if hash is not True and hash is not False and hash is not None: - # Can't use `hash in` because 1 == True for example. - msg = "Invalid value for hash. Must be True, False, or None." - raise TypeError(msg) - - if hash is False or (hash is None and eq is False) or is_exc: - # Don't do anything. Should fall back to __object__'s __hash__ - # which is by id. - if cache_hash: - msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." - raise TypeError(msg) - elif hash is True or ( - hash is None and eq is True and is_frozen is True - ): - # Build a __hash__ if told so, or if it's safe. - builder.add_hash() - else: - # Raise TypeError on attempts to hash. - if cache_hash: - msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." - raise TypeError(msg) - builder.make_unhashable() - - if _determine_whether_to_implement( - cls, init, auto_detect, ("__init__",) - ): - builder.add_init() - else: - builder.add_attrs_init() - if cache_hash: - msg = "Invalid value for cache_hash. To use hash caching, init must be True." - raise TypeError(msg) - - if ( - PY310 - and match_args - and not _has_own_attribute(cls, "__match_args__") - ): - builder.add_match_args() - - return builder.build_class() - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. - if maybe_cls is None: - return wrap - - return wrap(maybe_cls) - - -_attrs = attrs -""" -Internal alias so we can use it in functions that take an argument called -*attrs*. -""" - - -def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return cls.__setattr__ is _frozen_setattrs - - -def _generate_unique_filename(cls, func_name): - """ - Create a "filename" suitable for a function being generated. 
- """ - return ( - f"" - ) - - -def _make_hash(cls, attrs, frozen, cache_hash): - attrs = tuple( - a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) - ) - - tab = " " - - unique_filename = _generate_unique_filename(cls, "hash") - type_hash = hash(unique_filename) - # If eq is custom generated, we need to include the functions in globs - globs = {} - - hash_def = "def __hash__(self" - hash_func = "hash((" - closing_braces = "))" - if not cache_hash: - hash_def += "):" - else: - hash_def += ", *" - - hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" - hash_func = "_cache_wrapper(" + hash_func - closing_braces += ")" - - method_lines = [hash_def] - - def append_hash_computation_lines(prefix, indent): - """ - Generate the code for actually computing the hash code. - Below this will either be returned directly or used to compute - a value which is then cached, depending on the value of cache_hash - """ - - method_lines.extend( - [ - indent + prefix + hash_func, - indent + f" {type_hash},", - ] - ) - - for a in attrs: - if a.eq_key: - cmp_name = f"_{a.name}_key" - globs[cmp_name] = a.eq_key - method_lines.append( - indent + f" {cmp_name}(self.{a.name})," - ) - else: - method_lines.append(indent + f" self.{a.name},") - - method_lines.append(indent + " " + closing_braces) - - if cache_hash: - method_lines.append(tab + f"if self.{_hash_cache_field} is None:") - if frozen: - append_hash_computation_lines( - f"object.__setattr__(self, '{_hash_cache_field}', ", tab * 2 - ) - method_lines.append(tab * 2 + ")") # close __setattr__ - else: - append_hash_computation_lines( - f"self.{_hash_cache_field} = ", tab * 2 - ) - method_lines.append(tab + f"return self.{_hash_cache_field}") - else: - append_hash_computation_lines("return ", tab) - - script = "\n".join(method_lines) - return _make_method("__hash__", script, unique_filename, globs) - - -def _add_hash(cls, attrs): - """ - Add a hash method to *cls*. - """ - cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) - return cls - - -def _make_ne(): - """ - Create __ne__ method. - """ - - def __ne__(self, other): - """ - Check equality and either forward a NotImplemented or - return the result negated. - """ - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - - return not result - - return __ne__ - - -def _make_eq(cls, attrs): - """ - Create __eq__ method for *cls* with *attrs*. - """ - attrs = [a for a in attrs if a.eq] - - unique_filename = _generate_unique_filename(cls, "eq") - lines = [ - "def __eq__(self, other):", - " if other.__class__ is not self.__class__:", - " return NotImplemented", - ] - - # We can't just do a big self.x = other.x and... clause due to - # irregularities like nan == nan is false but (nan,) == (nan,) is true. - globs = {} - if attrs: - lines.append(" return (") - others = [" ) == ("] - for a in attrs: - if a.eq_key: - cmp_name = f"_{a.name}_key" - # Add the key function to the global namespace - # of the evaluated function. - globs[cmp_name] = a.eq_key - lines.append(f" {cmp_name}(self.{a.name}),") - others.append(f" {cmp_name}(other.{a.name}),") - else: - lines.append(f" self.{a.name},") - others.append(f" other.{a.name},") - - lines += [*others, " )"] - else: - lines.append(" return True") - - script = "\n".join(lines) - - return _make_method("__eq__", script, unique_filename, globs) - - -def _make_order(cls, attrs): - """ - Create ordering methods for *cls* with *attrs*. 
- """ - attrs = [a for a in attrs if a.order] - - def attrs_to_tuple(obj): - """ - Save us some typing. - """ - return tuple( - key(value) if key else value - for value, key in ( - (getattr(obj, a.name), a.order_key) for a in attrs - ) - ) - - def __lt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) < attrs_to_tuple(other) - - return NotImplemented - - def __le__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) <= attrs_to_tuple(other) - - return NotImplemented - - def __gt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) > attrs_to_tuple(other) - - return NotImplemented - - def __ge__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) >= attrs_to_tuple(other) - - return NotImplemented - - return __lt__, __le__, __gt__, __ge__ - - -def _add_eq(cls, attrs=None): - """ - Add equality methods to *cls* with *attrs*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__eq__ = _make_eq(cls, attrs) - cls.__ne__ = _make_ne() - - return cls - - -def _make_repr(attrs, ns, cls): - unique_filename = _generate_unique_filename(cls, "repr") - # Figure out which attributes to include, and which function to use to - # format them. The a.repr value can be either bool or a custom - # callable. - attr_names_with_reprs = tuple( - (a.name, (repr if a.repr is True else a.repr), a.init) - for a in attrs - if a.repr is not False - ) - globs = { - name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr - } - globs["_compat"] = _compat - globs["AttributeError"] = AttributeError - globs["NOTHING"] = NOTHING - attribute_fragments = [] - for name, r, i in attr_names_with_reprs: - accessor = ( - "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' - ) - fragment = ( - "%s={%s!r}" % (name, accessor) - if r == repr - else "%s={%s_repr(%s)}" % (name, name, accessor) - ) - attribute_fragments.append(fragment) - repr_fragment = ", ".join(attribute_fragments) - - if ns is None: - cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' - else: - cls_name_fragment = ns + ".{self.__class__.__name__}" - - lines = [ - "def __repr__(self):", - " try:", - " already_repring = _compat.repr_context.already_repring", - " except AttributeError:", - " already_repring = {id(self),}", - " _compat.repr_context.already_repring = already_repring", - " else:", - " if id(self) in already_repring:", - " return '...'", - " else:", - " already_repring.add(id(self))", - " try:", - f" return f'{cls_name_fragment}({repr_fragment})'", - " finally:", - " already_repring.remove(id(self))", - ] - - return _make_method( - "__repr__", "\n".join(lines), unique_filename, globs=globs - ) - - -def _add_repr(cls, ns=None, attrs=None): - """ - Add a repr method to *cls*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__repr__ = _make_repr(attrs, ns, cls) - return cls - - -def fields(cls): - """ - Return the tuple of *attrs* attributes for a class. - - The tuple also allows accessing the fields by their names (see below for - examples). - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class. 
- - :rtype: tuple (with name accessors) of `attrs.Attribute` - - .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields - by name. - .. versionchanged:: 23.1.0 Add support for generic classes. - """ - generic_base = get_generic_base(cls) - - if generic_base is None and not isinstance(cls, type): - msg = "Passed object must be a class." - raise TypeError(msg) - - attrs = getattr(cls, "__attrs_attrs__", None) - - if attrs is None: - if generic_base is not None: - attrs = getattr(generic_base, "__attrs_attrs__", None) - if attrs is not None: - # Even though this is global state, stick it on here to speed - # it up. We rely on `cls` being cached for this to be - # efficient. - cls.__attrs_attrs__ = attrs - return attrs - msg = f"{cls!r} is not an attrs-decorated class." - raise NotAnAttrsClassError(msg) - - return attrs - - -def fields_dict(cls): - """ - Return an ordered dictionary of *attrs* attributes for a class, whose - keys are the attribute names. - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attrs.exceptions.NotAnAttrsClassError: If *cls* is not an *attrs* - class. - - :rtype: dict - - .. versionadded:: 18.1.0 - """ - if not isinstance(cls, type): - msg = "Passed object must be a class." - raise TypeError(msg) - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - msg = f"{cls!r} is not an attrs-decorated class." - raise NotAnAttrsClassError(msg) - return {a.name: a for a in attrs} - - -def validate(inst): - """ - Validate all attributes on *inst* that have a validator. - - Leaves all exceptions through. - - :param inst: Instance of a class with *attrs* attributes. - """ - if _config._run_validators is False: - return - - for a in fields(inst.__class__): - v = a.validator - if v is not None: - v(inst, a, getattr(inst, a.name)) - - -def _is_slot_cls(cls): - return "__slots__" in cls.__dict__ - - -def _is_slot_attr(a_name, base_attr_map): - """ - Check if the attribute name comes from a slot class. - """ - return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) - - -def _make_init( - cls, - attrs, - pre_init, - pre_init_has_args, - post_init, - frozen, - slots, - cache_hash, - base_attr_map, - is_exc, - cls_on_setattr, - attrs_init, -): - has_cls_on_setattr = ( - cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP - ) - - if frozen and has_cls_on_setattr: - msg = "Frozen classes can't use on_setattr." - raise ValueError(msg) - - needs_cached_setattr = cache_hash or frozen - filtered_attrs = [] - attr_dict = {} - for a in attrs: - if not a.init and a.default is NOTHING: - continue - - filtered_attrs.append(a) - attr_dict[a.name] = a - - if a.on_setattr is not None: - if frozen is True: - msg = "Frozen classes can't use on_setattr." - raise ValueError(msg) - - needs_cached_setattr = True - elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: - needs_cached_setattr = True - - unique_filename = _generate_unique_filename(cls, "init") - - script, globs, annotations = _attrs_to_init_script( - filtered_attrs, - frozen, - slots, - pre_init, - pre_init_has_args, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_cls_on_setattr, - attrs_init, - ) - if cls.__module__ in sys.modules: - # This makes typing.get_type_hints(CLS.__init__) resolve string types. 
- globs.update(sys.modules[cls.__module__].__dict__) - - globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) - - if needs_cached_setattr: - # Save the lookup overhead in __init__ if we need to circumvent - # setattr hooks. - globs["_cached_setattr_get"] = _obj_setattr.__get__ - - init = _make_method( - "__attrs_init__" if attrs_init else "__init__", - script, - unique_filename, - globs, - ) - init.__annotations__ = annotations - - return init - - -def _setattr(attr_name, value_var, has_on_setattr): - """ - Use the cached object.setattr to set *attr_name* to *value_var*. - """ - return f"_setattr('{attr_name}', {value_var})" - - -def _setattr_with_converter(attr_name, value_var, has_on_setattr): - """ - Use the cached object.setattr to set *attr_name* to *value_var*, but run - its converter first. - """ - return "_setattr('%s', %s(%s))" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - -def _assign(attr_name, value, has_on_setattr): - """ - Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise - relegate to _setattr. - """ - if has_on_setattr: - return _setattr(attr_name, value, True) - - return f"self.{attr_name} = {value}" - - -def _assign_with_converter(attr_name, value_var, has_on_setattr): - """ - Unless *attr_name* has an on_setattr hook, use normal assignment after - conversion. Otherwise relegate to _setattr_with_converter. - """ - if has_on_setattr: - return _setattr_with_converter(attr_name, value_var, True) - - return "self.%s = %s(%s)" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - -def _attrs_to_init_script( - attrs, - frozen, - slots, - pre_init, - pre_init_has_args, - post_init, - cache_hash, - base_attr_map, - is_exc, - needs_cached_setattr, - has_cls_on_setattr, - attrs_init, -): - """ - Return a script of an initializer for *attrs* and a dict of globals. - - The globals are expected by the generated script. - - If *frozen* is True, we cannot set the attributes directly so we use - a cached ``object.__setattr__``. - """ - lines = [] - if pre_init: - lines.append("self.__attrs_pre_init__()") - - if needs_cached_setattr: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. - # Note _setattr will be used again below if cache_hash is True - "_setattr = _cached_setattr_get(self)" - ) - - if frozen is True: - if slots is True: - fmt_setter = _setattr - fmt_setter_with_converter = _setattr_with_converter - else: - # Dict frozen classes assign directly to __dict__. - # But only if the attribute doesn't come from an ancestor slot - # class. - # Note _inst_dict will be used again below if cache_hash is True - lines.append("_inst_dict = self.__dict__") - - def fmt_setter(attr_name, value_var, has_on_setattr): - if _is_slot_attr(attr_name, base_attr_map): - return _setattr(attr_name, value_var, has_on_setattr) - - return f"_inst_dict['{attr_name}'] = {value_var}" - - def fmt_setter_with_converter( - attr_name, value_var, has_on_setattr - ): - if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): - return _setattr_with_converter( - attr_name, value_var, has_on_setattr - ) - - return "_inst_dict['%s'] = %s(%s)" % ( - attr_name, - _init_converter_pat % (attr_name,), - value_var, - ) - - else: - # Not frozen. - fmt_setter = _assign - fmt_setter_with_converter = _assign_with_converter - - args = [] - kw_only_args = [] - attrs_to_validate = [] - - # This is a dictionary of names to validator and converter callables. 
- # Injecting this into __init__ globals lets us avoid lookups. - names_for_globals = {} - annotations = {"return": None} - - for a in attrs: - if a.validator: - attrs_to_validate.append(a) - - attr_name = a.name - has_on_setattr = a.on_setattr is not None or ( - a.on_setattr is not setters.NO_OP and has_cls_on_setattr - ) - # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not - # explicitly provided - arg_name = a.alias - - has_factory = isinstance(a.default, Factory) - maybe_self = "self" if has_factory and a.default.takes_self else "" - - if a.init is False: - if has_factory: - init_factory_name = _init_factory_pat % (a.name,) - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - init_factory_name + f"({maybe_self})", - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - init_factory_name + f"({maybe_self})", - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - elif a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - f"attr_dict['{attr_name}'].default", - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - f"attr_dict['{attr_name}'].default", - has_on_setattr, - ) - ) - elif a.default is not NOTHING and not has_factory: - arg = f"{arg_name}=attr_dict['{attr_name}'].default" - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - elif has_factory: - arg = f"{arg_name}=NOTHING" - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - lines.append(f"if {arg_name} is not NOTHING:") - - init_factory_name = _init_factory_pat % (a.name,) - if a.converter is not None: - lines.append( - " " - + fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter_with_converter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append( - " " + fmt_setter(attr_name, arg_name, has_on_setattr) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter( - attr_name, - init_factory_name + "(" + maybe_self + ")", - has_on_setattr, - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.kw_only: - kw_only_args.append(arg_name) - else: - args.append(arg_name) - - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, arg_name, has_on_setattr - ) - ) - names_for_globals[ - _init_converter_pat % (a.name,) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) - - if a.init is True: - if a.type is not None and a.converter is None: - annotations[arg_name] = a.type - elif a.converter is not None: - # Try to get the type from the converter. - t = _AnnotationExtractor(a.converter).get_first_param_type() - if t: - annotations[arg_name] = t - - if attrs_to_validate: # we can skip this if there are no validators. 
- names_for_globals["_config"] = _config - lines.append("if _config._run_validators is True:") - for a in attrs_to_validate: - val_name = "__attr_validator_" + a.name - attr_name = "__attr_" + a.name - lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") - names_for_globals[val_name] = a.validator - names_for_globals[attr_name] = a - - if post_init: - lines.append("self.__attrs_post_init__()") - - # because this is set only after __attrs_post_init__ is called, a crash - # will result if post-init tries to access the hash code. This seemed - # preferable to setting this beforehand, in which case alteration to - # field values during post-init combined with post-init accessing the - # hash code would result in silent bugs. - if cache_hash: - if frozen: - if slots: # noqa: SIM108 - # if frozen and slots, then _setattr defined above - init_hash_cache = "_setattr('%s', %s)" - else: - # if frozen and not slots, then _inst_dict defined above - init_hash_cache = "_inst_dict['%s'] = %s" - else: - init_hash_cache = "self.%s = %s" - lines.append(init_hash_cache % (_hash_cache_field, "None")) - - # For exceptions we rely on BaseException.__init__ for proper - # initialization. - if is_exc: - vals = ",".join(f"self.{a.name}" for a in attrs if a.init) - - lines.append(f"BaseException.__init__(self, {vals})") - - args = ", ".join(args) - pre_init_args = args - if kw_only_args: - args += "%s*, %s" % ( - ", " if args else "", # leading comma - ", ".join(kw_only_args), # kw_only args - ) - pre_init_kw_only_args = ", ".join( - ["%s=%s" % (kw_arg, kw_arg) for kw_arg in kw_only_args] - ) - pre_init_args += ( - ", " if pre_init_args else "" - ) # handle only kwargs and no regular args - pre_init_args += pre_init_kw_only_args - - if pre_init and pre_init_has_args: - # If pre init method has arguments, pass same arguments as `__init__` - lines[0] = "self.__attrs_pre_init__(%s)" % pre_init_args - - return ( - "def %s(self, %s):\n %s\n" - % ( - ("__attrs_init__" if attrs_init else "__init__"), - args, - "\n ".join(lines) if lines else "pass", - ), - names_for_globals, - annotations, - ) - - -def _default_init_alias_for(name: str) -> str: - """ - The default __init__ parameter name for a field. - - This performs private-name adjustment via leading-unscore stripping, - and is the default value of Attribute.alias if not provided. - """ - - return name.lstrip("_") - - -class Attribute: - """ - *Read-only* representation of an attribute. - - .. warning:: - - You should never instantiate this class yourself. - - The class has *all* arguments of `attr.ib` (except for ``factory`` - which is only syntactic sugar for ``default=Factory(...)`` plus the - following: - - - ``name`` (`str`): The name of the attribute. - - ``alias`` (`str`): The __init__ parameter name of the attribute, after - any explicit overrides and default private-attribute-name handling. - - ``inherited`` (`bool`): Whether or not that attribute has been inherited - from a base class. - - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables - that are used for comparing and ordering objects by this attribute, - respectively. These are set by passing a callable to `attr.ib`'s ``eq``, - ``order``, or ``cmp`` arguments. See also :ref:`comparison customization - `. - - Instances of this class are frequently used for introspection purposes - like: - - - `fields` returns a tuple of them. - - Validators get them passed as the first argument. - - The :ref:`field transformer ` hook receives a list of - them. 
- - The ``alias`` property exposes the __init__ parameter name of the field, - with any overrides and default private-attribute handling applied. - - - .. versionadded:: 20.1.0 *inherited* - .. versionadded:: 20.1.0 *on_setattr* - .. versionchanged:: 20.2.0 *inherited* is not taken into account for - equality checks and hashing anymore. - .. versionadded:: 21.1.0 *eq_key* and *order_key* - .. versionadded:: 22.2.0 *alias* - - For the full version history of the fields, see `attr.ib`. - """ - - __slots__ = ( - "name", - "default", - "validator", - "repr", - "eq", - "eq_key", - "order", - "order_key", - "hash", - "init", - "metadata", - "type", - "converter", - "kw_only", - "inherited", - "on_setattr", - "alias", - ) - - def __init__( - self, - name, - default, - validator, - repr, - cmp, # XXX: unused, remove along with other cmp code. - hash, - init, - inherited, - metadata=None, - type=None, - converter=None, - kw_only=False, - eq=None, - eq_key=None, - order=None, - order_key=None, - on_setattr=None, - alias=None, - ): - eq, eq_key, order, order_key = _determine_attrib_eq_order( - cmp, eq_key or eq, order_key or order, True - ) - - # Cache this descriptor here to speed things up later. - bound_setattr = _obj_setattr.__get__(self) - - # Despite the big red warning, people *do* instantiate `Attribute` - # themselves. - bound_setattr("name", name) - bound_setattr("default", default) - bound_setattr("validator", validator) - bound_setattr("repr", repr) - bound_setattr("eq", eq) - bound_setattr("eq_key", eq_key) - bound_setattr("order", order) - bound_setattr("order_key", order_key) - bound_setattr("hash", hash) - bound_setattr("init", init) - bound_setattr("converter", converter) - bound_setattr( - "metadata", - ( - types.MappingProxyType(dict(metadata)) # Shallow copy - if metadata - else _empty_metadata_singleton - ), - ) - bound_setattr("type", type) - bound_setattr("kw_only", kw_only) - bound_setattr("inherited", inherited) - bound_setattr("on_setattr", on_setattr) - bound_setattr("alias", alias) - - def __setattr__(self, name, value): - raise FrozenInstanceError() - - @classmethod - def from_counting_attr(cls, name, ca, type=None): - # type holds the annotated value. deal with conflicts: - if type is None: - type = ca.type - elif ca.type is not None: - msg = "Type annotation and type argument cannot both be present" - raise ValueError(msg) - inst_dict = { - k: getattr(ca, k) - for k in Attribute.__slots__ - if k - not in ( - "name", - "validator", - "default", - "type", - "inherited", - ) # exclude methods and deprecated alias - } - return cls( - name=name, - validator=ca._validator, - default=ca._default, - type=type, - cmp=None, - inherited=False, - **inst_dict, - ) - - # Don't use attrs.evolve since fields(Attribute) doesn't work - def evolve(self, **changes): - """ - Copy *self* and apply *changes*. - - This works similarly to `attrs.evolve` but that function does not work - with `Attribute`. - - It is mainly meant to be used for `transform-fields`. - - .. versionadded:: 20.3.0 - """ - new = copy.copy(self) - - new._setattrs(changes.items()) - - return new - - # Don't use _add_pickle since fields(Attribute) doesn't work - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple( - getattr(self, name) if name != "metadata" else dict(self.metadata) - for name in self.__slots__ - ) - - def __setstate__(self, state): - """ - Play nice with pickle. 
- """ - self._setattrs(zip(self.__slots__, state)) - - def _setattrs(self, name_values_pairs): - bound_setattr = _obj_setattr.__get__(self) - for name, value in name_values_pairs: - if name != "metadata": - bound_setattr(name, value) - else: - bound_setattr( - name, - types.MappingProxyType(dict(value)) - if value - else _empty_metadata_singleton, - ) - - -_a = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=(name != "metadata"), - init=True, - inherited=False, - alias=_default_init_alias_for(name), - ) - for name in Attribute.__slots__ -] - -Attribute = _add_hash( - _add_eq( - _add_repr(Attribute, attrs=_a), - attrs=[a for a in _a if a.name != "inherited"], - ), - attrs=[a for a in _a if a.hash and a.name != "inherited"], -) - - -class _CountingAttr: - """ - Intermediate representation of attributes that uses a counter to preserve - the order in which the attributes have been defined. - - *Internal* data structure of the attrs library. Running into is most - likely the result of a bug like a forgotten `@attr.s` decorator. - """ - - __slots__ = ( - "counter", - "_default", - "repr", - "eq", - "eq_key", - "order", - "order_key", - "hash", - "init", - "metadata", - "_validator", - "converter", - "type", - "kw_only", - "on_setattr", - "alias", - ) - __attrs_attrs__ = ( - *tuple( - Attribute( - name=name, - alias=_default_init_alias_for(name), - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=True, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ) - for name in ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "on_setattr", - "alias", - ) - ), - Attribute( - name="metadata", - alias="metadata", - default=None, - validator=None, - repr=True, - cmp=None, - hash=False, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ), - ) - cls_counter = 0 - - def __init__( - self, - default, - validator, - repr, - cmp, - hash, - init, - converter, - metadata, - type, - kw_only, - eq, - eq_key, - order, - order_key, - on_setattr, - alias, - ): - _CountingAttr.cls_counter += 1 - self.counter = _CountingAttr.cls_counter - self._default = default - self._validator = validator - self.converter = converter - self.repr = repr - self.eq = eq - self.eq_key = eq_key - self.order = order - self.order_key = order_key - self.hash = hash - self.init = init - self.metadata = metadata - self.type = type - self.kw_only = kw_only - self.on_setattr = on_setattr - self.alias = alias - - def validator(self, meth): - """ - Decorator that adds *meth* to the list of validators. - - Returns *meth* unchanged. - - .. versionadded:: 17.1.0 - """ - if self._validator is None: - self._validator = meth - else: - self._validator = and_(self._validator, meth) - return meth - - def default(self, meth): - """ - Decorator that allows to set the default for an attribute. - - Returns *meth* unchanged. - - :raises DefaultAlreadySetError: If default has been set before. - - .. versionadded:: 17.1.0 - """ - if self._default is not NOTHING: - raise DefaultAlreadySetError() - - self._default = Factory(meth, takes_self=True) - - return meth - - -_CountingAttr = _add_eq(_add_repr(_CountingAttr)) - - -class Factory: - """ - Stores a factory callable. - - If passed as the default value to `attrs.field`, the factory is used to - generate a new value. 
- - :param callable factory: A callable that takes either none or exactly one - mandatory positional argument depending on *takes_self*. - :param bool takes_self: Pass the partially initialized instance that is - being initialized as a positional argument. - - .. versionadded:: 17.1.0 *takes_self* - """ - - __slots__ = ("factory", "takes_self") - - def __init__(self, factory, takes_self=False): - self.factory = factory - self.takes_self = takes_self - - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple(getattr(self, name) for name in self.__slots__) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - for name, value in zip(self.__slots__, state): - setattr(self, name, value) - - -_f = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=True, - init=True, - inherited=False, - ) - for name in Factory.__slots__ -] - -Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) - - -def make_class( - name, attrs, bases=(object,), class_body=None, **attributes_arguments -): - r""" - A quick way to create a new class called *name* with *attrs*. - - :param str name: The name for the new class. - - :param attrs: A list of names or a dictionary of mappings of names to - `attr.ib`\ s / `attrs.field`\ s. - - The order is deduced from the order of the names or attributes inside - *attrs*. Otherwise the order of the definition of the attributes is - used. - :type attrs: `list` or `dict` - - :param tuple bases: Classes that the new class will subclass. - - :param dict class_body: An optional dictionary of class attributes for the new class. - - :param attributes_arguments: Passed unmodified to `attr.s`. - - :return: A new class with *attrs*. - :rtype: type - - .. versionadded:: 17.1.0 *bases* - .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. - .. versionchanged:: 23.2.0 *class_body* - """ - if isinstance(attrs, dict): - cls_dict = attrs - elif isinstance(attrs, (list, tuple)): - cls_dict = {a: attrib() for a in attrs} - else: - msg = "attrs argument must be a dict or a list." - raise TypeError(msg) - - pre_init = cls_dict.pop("__attrs_pre_init__", None) - post_init = cls_dict.pop("__attrs_post_init__", None) - user_init = cls_dict.pop("__init__", None) - - body = {} - if class_body is not None: - body.update(class_body) - if pre_init is not None: - body["__attrs_pre_init__"] = pre_init - if post_init is not None: - body["__attrs_post_init__"] = post_init - if user_init is not None: - body["__init__"] = user_init - - type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) - - # For pickling to work, the __module__ variable needs to be set to the - # frame where the class is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). - with contextlib.suppress(AttributeError, ValueError): - type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__" - ) - - # We do it here for proper warnings with meaningful stacklevel. 
- cmp = attributes_arguments.pop("cmp", None) - ( - attributes_arguments["eq"], - attributes_arguments["order"], - ) = _determine_attrs_eq_order( - cmp, - attributes_arguments.get("eq"), - attributes_arguments.get("order"), - True, - ) - - return _attrs(these=cls_dict, **attributes_arguments)(type_) - - -# These are required by within this module so we define them here and merely -# import into .validators / .converters. - - -@attrs(slots=True, hash=True) -class _AndValidator: - """ - Compose many validators to a single one. - """ - - _validators = attrib() - - def __call__(self, inst, attr, value): - for v in self._validators: - v(inst, attr, value) - - -def and_(*validators): - """ - A validator that composes multiple validators into one. - - When called on a value, it runs all wrapped validators. - - :param callables validators: Arbitrary number of validators. - - .. versionadded:: 17.1.0 - """ - vals = [] - for validator in validators: - vals.extend( - validator._validators - if isinstance(validator, _AndValidator) - else [validator] - ) - - return _AndValidator(tuple(vals)) - - -def pipe(*converters): - """ - A converter that composes multiple converters into one. - - When called on a value, it runs all wrapped converters, returning the - *last* value. - - Type annotations will be inferred from the wrapped converters', if - they have any. - - :param callables converters: Arbitrary number of converters. - - .. versionadded:: 20.1.0 - """ - - def pipe_converter(val): - for converter in converters: - val = converter(val) - - return val - - if not converters: - # If the converter list is empty, pipe_converter is the identity. - A = typing.TypeVar("A") - pipe_converter.__annotations__ = {"val": A, "return": A} - else: - # Get parameter type from first converter. - t = _AnnotationExtractor(converters[0]).get_first_param_type() - if t: - pipe_converter.__annotations__["val"] = t - - # Get return type from last converter. - rt = _AnnotationExtractor(converters[-1]).get_return_type() - if rt: - pipe_converter.__annotations__["return"] = rt - - return pipe_converter diff --git a/.venv/Lib/site-packages/attr/_next_gen.py b/.venv/Lib/site-packages/attr/_next_gen.py deleted file mode 100644 index 1fb9f25..0000000 --- a/.venv/Lib/site-packages/attr/_next_gen.py +++ /dev/null @@ -1,229 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -These are keyword-only APIs that call `attr.s` and `attr.ib` with different -default values. -""" - - -from functools import partial - -from . import setters -from ._funcs import asdict as _asdict -from ._funcs import astuple as _astuple -from ._make import ( - NOTHING, - _frozen_setattrs, - _ng_default_on_setattr, - attrib, - attrs, -) -from .exceptions import UnannotatedAttributeError - - -def define( - maybe_cls=None, - *, - these=None, - repr=None, - unsafe_hash=None, - hash=None, - init=None, - slots=True, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=None, - kw_only=False, - cache_hash=False, - auto_exc=True, - eq=None, - order=False, - auto_detect=True, - getstate_setstate=None, - on_setattr=None, - field_transformer=None, - match_args=True, -): - r""" - Define an *attrs* class. - - Differences to the classic `attr.s` that it uses underneath: - - - Automatically detect whether or not *auto_attribs* should be `True` (c.f. - *auto_attribs* parameter). - - Converters and validators run when attributes are set by default -- if - *frozen* is `False`. - - *slots=True* - - .. 
caution:: - - Usually this has only upsides and few visible effects in everyday - programming. But it *can* lead to some surprising behaviors, so please - make sure to read :term:`slotted classes`. - - *auto_exc=True* - - *auto_detect=True* - - *order=False* - - Some options that were only relevant on Python 2 or were kept around for - backwards-compatibility have been removed. - - Please note that these are all defaults and you can change them as you - wish. - - :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves - exactly like `attr.s`. If left `None`, `attr.s` will try to guess: - - 1. If any attributes are annotated and no unannotated `attrs.fields`\ s - are found, it assumes *auto_attribs=True*. - 2. Otherwise it assumes *auto_attribs=False* and tries to collect - `attrs.fields`\ s. - - For now, please refer to `attr.s` for the rest of the parameters. - - .. versionadded:: 20.1.0 - .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. - .. versionadded:: 22.2.0 - *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). - """ - - def do_it(cls, auto_attribs): - return attrs( - maybe_cls=cls, - these=these, - repr=repr, - hash=hash, - unsafe_hash=unsafe_hash, - init=init, - slots=slots, - frozen=frozen, - weakref_slot=weakref_slot, - str=str, - auto_attribs=auto_attribs, - kw_only=kw_only, - cache_hash=cache_hash, - auto_exc=auto_exc, - eq=eq, - order=order, - auto_detect=auto_detect, - collect_by_mro=True, - getstate_setstate=getstate_setstate, - on_setattr=on_setattr, - field_transformer=field_transformer, - match_args=match_args, - ) - - def wrap(cls): - """ - Making this a wrapper ensures this code runs during class creation. - - We also ensure that frozen-ness of classes is inherited. - """ - nonlocal frozen, on_setattr - - had_on_setattr = on_setattr not in (None, setters.NO_OP) - - # By default, mutable classes convert & validate on setattr. - if frozen is False and on_setattr is None: - on_setattr = _ng_default_on_setattr - - # However, if we subclass a frozen class, we inherit the immutability - # and disable on_setattr. - for base_cls in cls.__bases__: - if base_cls.__setattr__ is _frozen_setattrs: - if had_on_setattr: - msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." - raise ValueError(msg) - - on_setattr = setters.NO_OP - break - - if auto_attribs is not None: - return do_it(cls, auto_attribs) - - try: - return do_it(cls, True) - except UnannotatedAttributeError: - return do_it(cls, False) - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. - if maybe_cls is None: - return wrap - - return wrap(maybe_cls) - - -mutable = define -frozen = partial(define, frozen=True, on_setattr=None) - - -def field( - *, - default=NOTHING, - validator=None, - repr=True, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, - on_setattr=None, - alias=None, -): - """ - Identical to `attr.ib`, except keyword-only and with some arguments - removed. - - .. versionadded:: 23.1.0 - The *type* parameter has been re-added; mostly for `attrs.make_class`. - Please note that type checkers ignore this metadata. - .. 
versionadded:: 20.1.0 - """ - return attrib( - default=default, - validator=validator, - repr=repr, - hash=hash, - init=init, - metadata=metadata, - type=type, - converter=converter, - factory=factory, - kw_only=kw_only, - eq=eq, - order=order, - on_setattr=on_setattr, - alias=alias, - ) - - -def asdict(inst, *, recurse=True, filter=None, value_serializer=None): - """ - Same as `attr.asdict`, except that collections types are always retained - and dict is always used as *dict_factory*. - - .. versionadded:: 21.3.0 - """ - return _asdict( - inst=inst, - recurse=recurse, - filter=filter, - value_serializer=value_serializer, - retain_collection_types=True, - ) - - -def astuple(inst, *, recurse=True, filter=None): - """ - Same as `attr.astuple`, except that collections types are always retained - and `tuple` is always used as the *tuple_factory*. - - .. versionadded:: 21.3.0 - """ - return _astuple( - inst=inst, recurse=recurse, filter=filter, retain_collection_types=True - ) diff --git a/.venv/Lib/site-packages/attr/_typing_compat.pyi b/.venv/Lib/site-packages/attr/_typing_compat.pyi deleted file mode 100644 index ca7b71e..0000000 --- a/.venv/Lib/site-packages/attr/_typing_compat.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any, ClassVar, Protocol - -# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. -MYPY = False - -if MYPY: - # A protocol to be able to statically accept an attrs class. - class AttrsInstance_(Protocol): - __attrs_attrs__: ClassVar[Any] - -else: - # For type checkers without plug-in support use an empty protocol that - # will (hopefully) be combined into a union. - class AttrsInstance_(Protocol): - pass diff --git a/.venv/Lib/site-packages/attr/_version_info.py b/.venv/Lib/site-packages/attr/_version_info.py deleted file mode 100644 index 51a1312..0000000 --- a/.venv/Lib/site-packages/attr/_version_info.py +++ /dev/null @@ -1,86 +0,0 @@ -# SPDX-License-Identifier: MIT - - -from functools import total_ordering - -from ._funcs import astuple -from ._make import attrib, attrs - - -@total_ordering -@attrs(eq=False, order=False, slots=True, frozen=True) -class VersionInfo: - """ - A version object that can be compared to tuple of length 1--4: - - >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) - True - >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) - True - >>> vi = attr.VersionInfo(19, 2, 0, "final") - >>> vi < (19, 1, 1) - False - >>> vi < (19,) - False - >>> vi == (19, 2,) - True - >>> vi == (19, 2, 1) - False - - .. versionadded:: 19.2 - """ - - year = attrib(type=int) - minor = attrib(type=int) - micro = attrib(type=int) - releaselevel = attrib(type=str) - - @classmethod - def _from_version_string(cls, s): - """ - Parse *s* and return a _VersionInfo. - """ - v = s.split(".") - if len(v) == 3: - v.append("final") - - return cls( - year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] - ) - - def _ensure_tuple(self, other): - """ - Ensure *other* is a tuple of a valid length. - - Returns a possibly transformed *other* and ourselves as a tuple of - the same length as *other*. 
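Illustrative aside (a sketch of the next-gen wrappers removed above, not text from the deleted sources; class name invented): attrs.define layers the defaults documented in its docstring on top of the classic decorator:

import attrs

# define() defaults: slots=True, auto_attribs detected from annotations,
# and converters/validators also run on later attribute assignment.
@attrs.define
class User:
    name: str
    tags: list = attrs.field(factory=list)

u = User("ada")
assert attrs.asdict(u) == {"name": "ada", "tags": []}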
- """ - - if self.__class__ is other.__class__: - other = astuple(other) - - if not isinstance(other, tuple): - raise NotImplementedError - - if not (1 <= len(other) <= 4): - raise NotImplementedError - - return astuple(self)[: len(other)], other - - def __eq__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - return us == them - - def __lt__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't - # have to do anything special with releaselevel for now. - return us < them diff --git a/.venv/Lib/site-packages/attr/_version_info.pyi b/.venv/Lib/site-packages/attr/_version_info.pyi deleted file mode 100644 index 45ced08..0000000 --- a/.venv/Lib/site-packages/attr/_version_info.pyi +++ /dev/null @@ -1,9 +0,0 @@ -class VersionInfo: - @property - def year(self) -> int: ... - @property - def minor(self) -> int: ... - @property - def micro(self) -> int: ... - @property - def releaselevel(self) -> str: ... diff --git a/.venv/Lib/site-packages/attr/converters.py b/.venv/Lib/site-packages/attr/converters.py deleted file mode 100644 index 2bf4c90..0000000 --- a/.venv/Lib/site-packages/attr/converters.py +++ /dev/null @@ -1,144 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly useful converters. -""" - - -import typing - -from ._compat import _AnnotationExtractor -from ._make import NOTHING, Factory, pipe - - -__all__ = [ - "default_if_none", - "optional", - "pipe", - "to_bool", -] - - -def optional(converter): - """ - A converter that allows an attribute to be optional. An optional attribute - is one which can be set to ``None``. - - Type annotations will be inferred from the wrapped converter's, if it - has any. - - :param callable converter: the converter that is used for non-``None`` - values. - - .. versionadded:: 17.1.0 - """ - - def optional_converter(val): - if val is None: - return None - return converter(val) - - xtr = _AnnotationExtractor(converter) - - t = xtr.get_first_param_type() - if t: - optional_converter.__annotations__["val"] = typing.Optional[t] - - rt = xtr.get_return_type() - if rt: - optional_converter.__annotations__["return"] = typing.Optional[rt] - - return optional_converter - - -def default_if_none(default=NOTHING, factory=None): - """ - A converter that allows to replace ``None`` values by *default* or the - result of *factory*. - - :param default: Value to be used if ``None`` is passed. Passing an instance - of `attrs.Factory` is supported, however the ``takes_self`` option - is *not*. - :param callable factory: A callable that takes no parameters whose result - is used if ``None`` is passed. - - :raises TypeError: If **neither** *default* or *factory* is passed. - :raises TypeError: If **both** *default* and *factory* are passed. - :raises ValueError: If an instance of `attrs.Factory` is passed with - ``takes_self=True``. - - .. versionadded:: 18.2.0 - """ - if default is NOTHING and factory is None: - msg = "Must pass either `default` or `factory`." - raise TypeError(msg) - - if default is not NOTHING and factory is not None: - msg = "Must pass either `default` or `factory` but not both." - raise TypeError(msg) - - if factory is not None: - default = Factory(factory) - - if isinstance(default, Factory): - if default.takes_self: - msg = "`takes_self` is not supported by default_if_none." 
- raise ValueError(msg) - - def default_if_none_converter(val): - if val is not None: - return val - - return default.factory() - - else: - - def default_if_none_converter(val): - if val is not None: - return val - - return default - - return default_if_none_converter - - -def to_bool(val): - """ - Convert "boolean" strings (e.g., from env. vars.) to real booleans. - - Values mapping to :code:`True`: - - - :code:`True` - - :code:`"true"` / :code:`"t"` - - :code:`"yes"` / :code:`"y"` - - :code:`"on"` - - :code:`"1"` - - :code:`1` - - Values mapping to :code:`False`: - - - :code:`False` - - :code:`"false"` / :code:`"f"` - - :code:`"no"` / :code:`"n"` - - :code:`"off"` - - :code:`"0"` - - :code:`0` - - :raises ValueError: for any other value. - - .. versionadded:: 21.3.0 - """ - if isinstance(val, str): - val = val.lower() - truthy = {True, "true", "t", "yes", "y", "on", "1", 1} - falsy = {False, "false", "f", "no", "n", "off", "0", 0} - try: - if val in truthy: - return True - if val in falsy: - return False - except TypeError: - # Raised when "val" is not hashable (e.g., lists) - pass - msg = f"Cannot convert value to bool: {val}" - raise ValueError(msg) diff --git a/.venv/Lib/site-packages/attr/converters.pyi b/.venv/Lib/site-packages/attr/converters.pyi deleted file mode 100644 index 5abb49f..0000000 --- a/.venv/Lib/site-packages/attr/converters.pyi +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Callable, TypeVar, overload - -from . import _ConverterType - -_T = TypeVar("_T") - -def pipe(*validators: _ConverterType) -> _ConverterType: ... -def optional(converter: _ConverterType) -> _ConverterType: ... -@overload -def default_if_none(default: _T) -> _ConverterType: ... -@overload -def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... -def to_bool(val: str) -> bool: ... diff --git a/.venv/Lib/site-packages/attr/exceptions.py b/.venv/Lib/site-packages/attr/exceptions.py deleted file mode 100644 index 3b7abb8..0000000 --- a/.venv/Lib/site-packages/attr/exceptions.py +++ /dev/null @@ -1,95 +0,0 @@ -# SPDX-License-Identifier: MIT - -from __future__ import annotations - -from typing import ClassVar - - -class FrozenError(AttributeError): - """ - A frozen/immutable instance or attribute have been attempted to be - modified. - - It mirrors the behavior of ``namedtuples`` by using the same error message - and subclassing `AttributeError`. - - .. versionadded:: 20.1.0 - """ - - msg = "can't set attribute" - args: ClassVar[tuple[str]] = [msg] - - -class FrozenInstanceError(FrozenError): - """ - A frozen instance has been attempted to be modified. - - .. versionadded:: 16.1.0 - """ - - -class FrozenAttributeError(FrozenError): - """ - A frozen attribute has been attempted to be modified. - - .. versionadded:: 20.1.0 - """ - - -class AttrsAttributeNotFoundError(ValueError): - """ - An *attrs* function couldn't find an attribute that the user asked for. - - .. versionadded:: 16.2.0 - """ - - -class NotAnAttrsClassError(ValueError): - """ - A non-*attrs* class has been passed into an *attrs* function. - - .. versionadded:: 16.2.0 - """ - - -class DefaultAlreadySetError(RuntimeError): - """ - A default has been set when defining the field and is attempted to be reset - using the decorator. - - .. versionadded:: 17.1.0 - """ - - -class UnannotatedAttributeError(RuntimeError): - """ - A class with ``auto_attribs=True`` has a field without a type annotation. - - .. 
versionadded:: 17.3.0 - """ - - -class PythonTooOldError(RuntimeError): - """ - It was attempted to use an *attrs* feature that requires a newer Python - version. - - .. versionadded:: 18.2.0 - """ - - -class NotCallableError(TypeError): - """ - A field requiring a callable has been set with a value that is not - callable. - - .. versionadded:: 19.2.0 - """ - - def __init__(self, msg, value): - super(TypeError, self).__init__(msg, value) - self.msg = msg - self.value = value - - def __str__(self): - return str(self.msg) diff --git a/.venv/Lib/site-packages/attr/exceptions.pyi b/.venv/Lib/site-packages/attr/exceptions.pyi deleted file mode 100644 index f268011..0000000 --- a/.venv/Lib/site-packages/attr/exceptions.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any - -class FrozenError(AttributeError): - msg: str = ... - -class FrozenInstanceError(FrozenError): ... -class FrozenAttributeError(FrozenError): ... -class AttrsAttributeNotFoundError(ValueError): ... -class NotAnAttrsClassError(ValueError): ... -class DefaultAlreadySetError(RuntimeError): ... -class UnannotatedAttributeError(RuntimeError): ... -class PythonTooOldError(RuntimeError): ... - -class NotCallableError(TypeError): - msg: str = ... - value: Any = ... - def __init__(self, msg: str, value: Any) -> None: ... diff --git a/.venv/Lib/site-packages/attr/filters.py b/.venv/Lib/site-packages/attr/filters.py deleted file mode 100644 index a1e40c9..0000000 --- a/.venv/Lib/site-packages/attr/filters.py +++ /dev/null @@ -1,66 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly useful filters for `attr.asdict`. -""" - -from ._make import Attribute - - -def _split_what(what): - """ - Returns a tuple of `frozenset`s of classes and attributes. - """ - return ( - frozenset(cls for cls in what if isinstance(cls, type)), - frozenset(cls for cls in what if isinstance(cls, str)), - frozenset(cls for cls in what if isinstance(cls, Attribute)), - ) - - -def include(*what): - """ - Include *what*. - - :param what: What to include. - :type what: `list` of classes `type`, field names `str` or - `attrs.Attribute`\\ s - - :rtype: `callable` - - .. versionchanged:: 23.1.0 Accept strings with field names. - """ - cls, names, attrs = _split_what(what) - - def include_(attribute, value): - return ( - value.__class__ in cls - or attribute.name in names - or attribute in attrs - ) - - return include_ - - -def exclude(*what): - """ - Exclude *what*. - - :param what: What to exclude. - :type what: `list` of classes `type`, field names `str` or - `attrs.Attribute`\\ s. - - :rtype: `callable` - - .. versionchanged:: 23.3.0 Accept field name string as input argument - """ - cls, names, attrs = _split_what(what) - - def exclude_(attribute, value): - return not ( - value.__class__ in cls - or attribute.name in names - or attribute in attrs - ) - - return exclude_ diff --git a/.venv/Lib/site-packages/attr/filters.pyi b/.venv/Lib/site-packages/attr/filters.pyi deleted file mode 100644 index 8a02fa0..0000000 --- a/.venv/Lib/site-packages/attr/filters.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Any, Union - -from . import Attribute, _FilterType - -def include(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ... -def exclude(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ... 
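
Editor's sketch (not part of the original diff): the `attr.converters` and `attr.filters` modules deleted above are small helpers that are easiest to understand next to a concrete field definition. The snippet below assumes the attrs 23.2.0 vendored in this virtualenv; `ServerConfig` and its field names are invented purely for illustration.

```python
# Minimal sketch, assuming attrs 23.2.0; ServerConfig is a made-up example class.
from attrs import asdict, define, field, filters
from attrs.converters import default_if_none, optional, to_bool


@define
class ServerConfig:
    host: str
    # to_bool() turns "yes"/"no"/"1"/"0"/"on"/"off" strings into real booleans.
    debug: bool = field(converter=to_bool, default="off")
    # optional() lets None pass through untouched; anything else is fed to int().
    port: int | None = field(converter=optional(int), default=None)
    # default_if_none() swaps an explicit None for a fallback value.
    region: str = field(converter=default_if_none("eu-west-1"), default=None)


cfg = ServerConfig(host="example.org", debug="yes", port="8080")

# exclude() builds the filter callable that asdict() expects; in this version
# field names can be given as plain strings.
print(asdict(cfg, filter=filters.exclude("host")))
# -> {'debug': True, 'port': 8080, 'region': 'eu-west-1'}
```
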
diff --git a/.venv/Lib/site-packages/attr/py.typed b/.venv/Lib/site-packages/attr/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/Lib/site-packages/attr/setters.py b/.venv/Lib/site-packages/attr/setters.py deleted file mode 100644 index 12ed675..0000000 --- a/.venv/Lib/site-packages/attr/setters.py +++ /dev/null @@ -1,73 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly used hooks for on_setattr. -""" - - -from . import _config -from .exceptions import FrozenAttributeError - - -def pipe(*setters): - """ - Run all *setters* and return the return value of the last one. - - .. versionadded:: 20.1.0 - """ - - def wrapped_pipe(instance, attrib, new_value): - rv = new_value - - for setter in setters: - rv = setter(instance, attrib, rv) - - return rv - - return wrapped_pipe - - -def frozen(_, __, ___): - """ - Prevent an attribute to be modified. - - .. versionadded:: 20.1.0 - """ - raise FrozenAttributeError() - - -def validate(instance, attrib, new_value): - """ - Run *attrib*'s validator on *new_value* if it has one. - - .. versionadded:: 20.1.0 - """ - if _config._run_validators is False: - return new_value - - v = attrib.validator - if not v: - return new_value - - v(instance, attrib, new_value) - - return new_value - - -def convert(instance, attrib, new_value): - """ - Run *attrib*'s converter -- if it has one -- on *new_value* and return the - result. - - .. versionadded:: 20.1.0 - """ - c = attrib.converter - if c: - return c(new_value) - - return new_value - - -# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. -# autodata stopped working, so the docstring is inlined in the API docs. -NO_OP = object() diff --git a/.venv/Lib/site-packages/attr/setters.pyi b/.venv/Lib/site-packages/attr/setters.pyi deleted file mode 100644 index 72f7ce4..0000000 --- a/.venv/Lib/site-packages/attr/setters.pyi +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Any, NewType, NoReturn, TypeVar - -from . import Attribute, _OnSetAttrType - -_T = TypeVar("_T") - -def frozen( - instance: Any, attribute: Attribute[Any], new_value: Any -) -> NoReturn: ... -def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... -def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... - -# convert is allowed to return Any, because they can be chained using pipe. -def convert( - instance: Any, attribute: Attribute[Any], new_value: Any -) -> Any: ... - -_NoOpType = NewType("_NoOpType", object) -NO_OP: _NoOpType diff --git a/.venv/Lib/site-packages/attr/validators.py b/.venv/Lib/site-packages/attr/validators.py deleted file mode 100644 index 34d6b76..0000000 --- a/.venv/Lib/site-packages/attr/validators.py +++ /dev/null @@ -1,681 +0,0 @@ -# SPDX-License-Identifier: MIT - -""" -Commonly useful validators. -""" - - -import operator -import re - -from contextlib import contextmanager -from re import Pattern - -from ._config import get_run_validators, set_run_validators -from ._make import _AndValidator, and_, attrib, attrs -from .converters import default_if_none -from .exceptions import NotCallableError - - -__all__ = [ - "and_", - "deep_iterable", - "deep_mapping", - "disabled", - "ge", - "get_disabled", - "gt", - "in_", - "instance_of", - "is_callable", - "le", - "lt", - "matches_re", - "max_len", - "min_len", - "not_", - "optional", - "provides", - "set_disabled", -] - - -def set_disabled(disabled): - """ - Globally disable or enable running validators. - - By default, they are run. - - :param disabled: If ``True``, disable running all validators. 
- :type disabled: bool - - .. warning:: - - This function is not thread-safe! - - .. versionadded:: 21.3.0 - """ - set_run_validators(not disabled) - - -def get_disabled(): - """ - Return a bool indicating whether validators are currently disabled or not. - - :return: ``True`` if validators are currently disabled. - :rtype: bool - - .. versionadded:: 21.3.0 - """ - return not get_run_validators() - - -@contextmanager -def disabled(): - """ - Context manager that disables running validators within its context. - - .. warning:: - - This context manager is not thread-safe! - - .. versionadded:: 21.3.0 - """ - set_run_validators(False) - try: - yield - finally: - set_run_validators(True) - - -@attrs(repr=False, slots=True, hash=True) -class _InstanceOfValidator: - type = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not isinstance(value, self.type): - msg = "'{name}' must be {type!r} (got {value!r} that is a {actual!r}).".format( - name=attr.name, - type=self.type, - actual=value.__class__, - value=value, - ) - raise TypeError( - msg, - attr, - self.type, - value, - ) - - def __repr__(self): - return f"" - - -def instance_of(type): - """ - A validator that raises a `TypeError` if the initializer is called - with a wrong type for this particular attribute (checks are performed using - `isinstance` therefore it's also valid to pass a tuple of types). - - :param type: The type to check for. - :type type: type or tuple of type - - :raises TypeError: With a human readable error message, the attribute - (of type `attrs.Attribute`), the expected type, and the value it - got. - """ - return _InstanceOfValidator(type) - - -@attrs(repr=False, frozen=True, slots=True) -class _MatchesReValidator: - pattern = attrib() - match_func = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not self.match_func(value): - msg = "'{name}' must match regex {pattern!r} ({value!r} doesn't)".format( - name=attr.name, pattern=self.pattern.pattern, value=value - ) - raise ValueError( - msg, - attr, - self.pattern, - value, - ) - - def __repr__(self): - return f"" - - -def matches_re(regex, flags=0, func=None): - r""" - A validator that raises `ValueError` if the initializer is called - with a string that doesn't match *regex*. - - :param regex: a regex string or precompiled pattern to match against - :param int flags: flags that will be passed to the underlying re function - (default 0) - :param callable func: which underlying `re` function to call. Valid options - are `re.fullmatch`, `re.search`, and `re.match`; the default ``None`` - means `re.fullmatch`. For performance reasons, the pattern is always - precompiled using `re.compile`. - - .. versionadded:: 19.2.0 - .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
- """ - valid_funcs = (re.fullmatch, None, re.search, re.match) - if func not in valid_funcs: - msg = "'func' must be one of {}.".format( - ", ".join( - sorted(e and e.__name__ or "None" for e in set(valid_funcs)) - ) - ) - raise ValueError(msg) - - if isinstance(regex, Pattern): - if flags: - msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead" - raise TypeError(msg) - pattern = regex - else: - pattern = re.compile(regex, flags) - - if func is re.match: - match_func = pattern.match - elif func is re.search: - match_func = pattern.search - else: - match_func = pattern.fullmatch - - return _MatchesReValidator(pattern, match_func) - - -@attrs(repr=False, slots=True, hash=True) -class _ProvidesValidator: - interface = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not self.interface.providedBy(value): - msg = "'{name}' must provide {interface!r} which {value!r} doesn't.".format( - name=attr.name, interface=self.interface, value=value - ) - raise TypeError( - msg, - attr, - self.interface, - value, - ) - - def __repr__(self): - return f"" - - -def provides(interface): - """ - A validator that raises a `TypeError` if the initializer is called - with an object that does not provide the requested *interface* (checks are - performed using ``interface.providedBy(value)`` (see `zope.interface - `_). - - :param interface: The interface to check for. - :type interface: ``zope.interface.Interface`` - - :raises TypeError: With a human readable error message, the attribute - (of type `attrs.Attribute`), the expected interface, and the - value it got. - - .. deprecated:: 23.1.0 - """ - import warnings - - warnings.warn( - "attrs's zope-interface support is deprecated and will be removed in, " - "or after, April 2024.", - DeprecationWarning, - stacklevel=2, - ) - return _ProvidesValidator(interface) - - -@attrs(repr=False, slots=True, hash=True) -class _OptionalValidator: - validator = attrib() - - def __call__(self, inst, attr, value): - if value is None: - return - - self.validator(inst, attr, value) - - def __repr__(self): - return f"" - - -def optional(validator): - """ - A validator that makes an attribute optional. An optional attribute is one - which can be set to ``None`` in addition to satisfying the requirements of - the sub-validator. - - :param Callable | tuple[Callable] | list[Callable] validator: A validator - (or validators) that is used for non-``None`` values. - - .. versionadded:: 15.1.0 - .. versionchanged:: 17.1.0 *validator* can be a list of validators. - .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators. - """ - if isinstance(validator, (list, tuple)): - return _OptionalValidator(_AndValidator(validator)) - - return _OptionalValidator(validator) - - -@attrs(repr=False, slots=True, hash=True) -class _InValidator: - options = attrib() - - def __call__(self, inst, attr, value): - try: - in_options = value in self.options - except TypeError: # e.g. `1 in "abc"` - in_options = False - - if not in_options: - msg = f"'{attr.name}' must be in {self.options!r} (got {value!r})" - raise ValueError( - msg, - attr, - self.options, - value, - ) - - def __repr__(self): - return f"" - - -def in_(options): - """ - A validator that raises a `ValueError` if the initializer is called - with a value that does not belong in the options provided. The check is - performed using ``value in options``. - - :param options: Allowed options. 
- :type options: list, tuple, `enum.Enum`, ... - - :raises ValueError: With a human readable error message, the attribute (of - type `attrs.Attribute`), the expected options, and the value it - got. - - .. versionadded:: 17.1.0 - .. versionchanged:: 22.1.0 - The ValueError was incomplete until now and only contained the human - readable error message. Now it contains all the information that has - been promised since 17.1.0. - """ - return _InValidator(options) - - -@attrs(repr=False, slots=False, hash=True) -class _IsCallableValidator: - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not callable(value): - message = ( - "'{name}' must be callable " - "(got {value!r} that is a {actual!r})." - ) - raise NotCallableError( - msg=message.format( - name=attr.name, value=value, actual=value.__class__ - ), - value=value, - ) - - def __repr__(self): - return "" - - -def is_callable(): - """ - A validator that raises a `attrs.exceptions.NotCallableError` if the - initializer is called with a value for this particular attribute - that is not callable. - - .. versionadded:: 19.1.0 - - :raises attrs.exceptions.NotCallableError: With a human readable error - message containing the attribute (`attrs.Attribute`) name, - and the value it got. - """ - return _IsCallableValidator() - - -@attrs(repr=False, slots=True, hash=True) -class _DeepIterable: - member_validator = attrib(validator=is_callable()) - iterable_validator = attrib( - default=None, validator=optional(is_callable()) - ) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.iterable_validator is not None: - self.iterable_validator(inst, attr, value) - - for member in value: - self.member_validator(inst, attr, member) - - def __repr__(self): - iterable_identifier = ( - "" - if self.iterable_validator is None - else f" {self.iterable_validator!r}" - ) - return ( - f"" - ) - - -def deep_iterable(member_validator, iterable_validator=None): - """ - A validator that performs deep validation of an iterable. - - :param member_validator: Validator(s) to apply to iterable members - :param iterable_validator: Validator to apply to iterable itself - (optional) - - .. versionadded:: 19.1.0 - - :raises TypeError: if any sub-validators fail - """ - if isinstance(member_validator, (list, tuple)): - member_validator = and_(*member_validator) - return _DeepIterable(member_validator, iterable_validator) - - -@attrs(repr=False, slots=True, hash=True) -class _DeepMapping: - key_validator = attrib(validator=is_callable()) - value_validator = attrib(validator=is_callable()) - mapping_validator = attrib(default=None, validator=optional(is_callable())) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.mapping_validator is not None: - self.mapping_validator(inst, attr, value) - - for key in value: - self.key_validator(inst, attr, key) - self.value_validator(inst, attr, value[key]) - - def __repr__(self): - return ( - "" - ).format(key=self.key_validator, value=self.value_validator) - - -def deep_mapping(key_validator, value_validator, mapping_validator=None): - """ - A validator that performs deep validation of a dictionary. 
- - :param key_validator: Validator to apply to dictionary keys - :param value_validator: Validator to apply to dictionary values - :param mapping_validator: Validator to apply to top-level mapping - attribute (optional) - - .. versionadded:: 19.1.0 - - :raises TypeError: if any sub-validators fail - """ - return _DeepMapping(key_validator, value_validator, mapping_validator) - - -@attrs(repr=False, frozen=True, slots=True) -class _NumberValidator: - bound = attrib() - compare_op = attrib() - compare_func = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not self.compare_func(value, self.bound): - msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def lt(val): - """ - A validator that raises `ValueError` if the initializer is called - with a number larger or equal to *val*. - - :param val: Exclusive upper bound for values - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, "<", operator.lt) - - -def le(val): - """ - A validator that raises `ValueError` if the initializer is called - with a number greater than *val*. - - :param val: Inclusive upper bound for values - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, "<=", operator.le) - - -def ge(val): - """ - A validator that raises `ValueError` if the initializer is called - with a number smaller than *val*. - - :param val: Inclusive lower bound for values - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, ">=", operator.ge) - - -def gt(val): - """ - A validator that raises `ValueError` if the initializer is called - with a number smaller or equal to *val*. - - :param val: Exclusive lower bound for values - - .. versionadded:: 21.3.0 - """ - return _NumberValidator(val, ">", operator.gt) - - -@attrs(repr=False, frozen=True, slots=True) -class _MaxLengthValidator: - max_length = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if len(value) > self.max_length: - msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def max_len(length): - """ - A validator that raises `ValueError` if the initializer is called - with a string or iterable that is longer than *length*. - - :param int length: Maximum length of the string or iterable - - .. versionadded:: 21.3.0 - """ - return _MaxLengthValidator(length) - - -@attrs(repr=False, frozen=True, slots=True) -class _MinLengthValidator: - min_length = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if len(value) < self.min_length: - msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}" - raise ValueError(msg) - - def __repr__(self): - return f"" - - -def min_len(length): - """ - A validator that raises `ValueError` if the initializer is called - with a string or iterable that is shorter than *length*. - - :param int length: Minimum length of the string or iterable - - .. versionadded:: 22.1.0 - """ - return _MinLengthValidator(length) - - -@attrs(repr=False, slots=True, hash=True) -class _SubclassOfValidator: - type = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. 
- """ - if not issubclass(value, self.type): - msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})." - raise TypeError( - msg, - attr, - self.type, - value, - ) - - def __repr__(self): - return f"" - - -def _subclass_of(type): - """ - A validator that raises a `TypeError` if the initializer is called - with a wrong type for this particular attribute (checks are performed using - `issubclass` therefore it's also valid to pass a tuple of types). - - :param type: The type to check for. - :type type: type or tuple of types - - :raises TypeError: With a human readable error message, the attribute - (of type `attrs.Attribute`), the expected type, and the value it - got. - """ - return _SubclassOfValidator(type) - - -@attrs(repr=False, slots=True, hash=True) -class _NotValidator: - validator = attrib() - msg = attrib( - converter=default_if_none( - "not_ validator child '{validator!r}' " - "did not raise a captured error" - ) - ) - exc_types = attrib( - validator=deep_iterable( - member_validator=_subclass_of(Exception), - iterable_validator=instance_of(tuple), - ), - ) - - def __call__(self, inst, attr, value): - try: - self.validator(inst, attr, value) - except self.exc_types: - pass # suppress error to invert validity - else: - raise ValueError( - self.msg.format( - validator=self.validator, - exc_types=self.exc_types, - ), - attr, - self.validator, - value, - self.exc_types, - ) - - def __repr__(self): - return ( - "" - ).format( - what=self.validator, - exc_types=self.exc_types, - ) - - -def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): - """ - A validator that wraps and logically 'inverts' the validator passed to it. - It will raise a `ValueError` if the provided validator *doesn't* raise a - `ValueError` or `TypeError` (by default), and will suppress the exception - if the provided validator *does*. - - Intended to be used with existing validators to compose logic without - needing to create inverted variants, for example, ``not_(in_(...))``. - - :param validator: A validator to be logically inverted. - :param msg: Message to raise if validator fails. - Formatted with keys ``exc_types`` and ``validator``. - :type msg: str - :param exc_types: Exception type(s) to capture. - Other types raised by child validators will not be intercepted and - pass through. - - :raises ValueError: With a human readable error message, - the attribute (of type `attrs.Attribute`), - the validator that failed to raise an exception, - the value it got, - and the expected exception types. - - .. versionadded:: 22.2.0 - """ - try: - exc_types = tuple(exc_types) - except TypeError: - exc_types = (exc_types,) - return _NotValidator(validator, msg, exc_types) diff --git a/.venv/Lib/site-packages/attr/validators.pyi b/.venv/Lib/site-packages/attr/validators.pyi deleted file mode 100644 index d194a75..0000000 --- a/.venv/Lib/site-packages/attr/validators.pyi +++ /dev/null @@ -1,88 +0,0 @@ -from typing import ( - Any, - AnyStr, - Callable, - Container, - ContextManager, - Iterable, - List, - Mapping, - Match, - Optional, - Pattern, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -from . import _ValidatorType -from . import _ValidatorArgType - -_T = TypeVar("_T") -_T1 = TypeVar("_T1") -_T2 = TypeVar("_T2") -_T3 = TypeVar("_T3") -_I = TypeVar("_I", bound=Iterable) -_K = TypeVar("_K") -_V = TypeVar("_V") -_M = TypeVar("_M", bound=Mapping) - -def set_disabled(run: bool) -> None: ... -def get_disabled() -> bool: ... -def disabled() -> ContextManager[None]: ... 
- -# To be more precise on instance_of use some overloads. -# If there are more than 3 items in the tuple then we fall back to Any -@overload -def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... -@overload -def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... -@overload -def instance_of( - type: Tuple[Type[_T1], Type[_T2]] -) -> _ValidatorType[Union[_T1, _T2]]: ... -@overload -def instance_of( - type: Tuple[Type[_T1], Type[_T2], Type[_T3]] -) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... -@overload -def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... -def provides(interface: Any) -> _ValidatorType[Any]: ... -def optional( - validator: Union[ - _ValidatorType[_T], List[_ValidatorType[_T]], Tuple[_ValidatorType[_T]] - ] -) -> _ValidatorType[Optional[_T]]: ... -def in_(options: Container[_T]) -> _ValidatorType[_T]: ... -def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... -def matches_re( - regex: Union[Pattern[AnyStr], AnyStr], - flags: int = ..., - func: Optional[ - Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] - ] = ..., -) -> _ValidatorType[AnyStr]: ... -def deep_iterable( - member_validator: _ValidatorArgType[_T], - iterable_validator: Optional[_ValidatorType[_I]] = ..., -) -> _ValidatorType[_I]: ... -def deep_mapping( - key_validator: _ValidatorType[_K], - value_validator: _ValidatorType[_V], - mapping_validator: Optional[_ValidatorType[_M]] = ..., -) -> _ValidatorType[_M]: ... -def is_callable() -> _ValidatorType[_T]: ... -def lt(val: _T) -> _ValidatorType[_T]: ... -def le(val: _T) -> _ValidatorType[_T]: ... -def ge(val: _T) -> _ValidatorType[_T]: ... -def gt(val: _T) -> _ValidatorType[_T]: ... -def max_len(length: int) -> _ValidatorType[_T]: ... -def min_len(length: int) -> _ValidatorType[_T]: ... -def not_( - validator: _ValidatorType[_T], - *, - msg: Optional[str] = None, - exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ..., -) -> _ValidatorType[_T]: ... 
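
Editor's sketch (not part of the original diff): the everyday validators from the `attr/validators.py` module deleted above, attached to fields of an attrs class. `User` and its fields are invented for illustration; attrs 23.2.0 behaviour is assumed.

```python
# Minimal sketch, assuming attrs 23.2.0; User is a made-up example class.
from attrs import define, field, validators as v


@define
class User:
    # matches_re() compiles string patterns and defaults to re.fullmatch.
    email: str = field(validator=v.matches_re(r"[^@ ]+@[^@ ]+\.[^@ ]+"))
    # A list of validators is and-ed together: an int between 0 and 150.
    age: int = field(validator=[v.instance_of(int), v.ge(0), v.le(150)])
    # deep_iterable() checks the container itself and every member.
    tags: list[str] = field(
        factory=list,
        validator=v.deep_iterable(
            member_validator=v.instance_of(str),
            iterable_validator=v.instance_of(list),
        ),
    )
    # optional() lets None through; otherwise the wrapped validator applies.
    nickname: str | None = field(
        default=None, validator=v.optional(v.max_len(12))
    )


User("ada@example.org", 36, ["admin"])  # passes all validators

try:
    User("not-an-email", 36)  # fullmatch fails -> ValueError from matches_re
except ValueError as exc:
    print(exc.args[0])
```

Because `@define` wires up `attr.setters.convert` and `attr.setters.validate` as its default `on_setattr` hooks (the setters module is also removed in this diff), the same validators run again when an attribute is reassigned after construction.
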
diff --git a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/INSTALLER b/.venv/Lib/site-packages/attrs-23.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/METADATA b/.venv/Lib/site-packages/attrs-23.2.0.dist-info/METADATA deleted file mode 100644 index c20be76..0000000 --- a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/METADATA +++ /dev/null @@ -1,202 +0,0 @@ -Metadata-Version: 2.1 -Name: attrs -Version: 23.2.0 -Summary: Classes Without Boilerplate -Project-URL: Documentation, https://www.attrs.org/ -Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html -Project-URL: GitHub, https://github.com/python-attrs/attrs -Project-URL: Funding, https://github.com/sponsors/hynek -Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi -Author-email: Hynek Schlawack -License-Expression: MIT -License-File: LICENSE -Keywords: attribute,boilerplate,class -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Requires-Dist: importlib-metadata; python_version < '3.8' -Provides-Extra: cov -Requires-Dist: attrs[tests]; extra == 'cov' -Requires-Dist: coverage[toml]>=5.3; extra == 'cov' -Provides-Extra: dev -Requires-Dist: attrs[tests]; extra == 'dev' -Requires-Dist: pre-commit; extra == 'dev' -Provides-Extra: docs -Requires-Dist: furo; extra == 'docs' -Requires-Dist: myst-parser; extra == 'docs' -Requires-Dist: sphinx; extra == 'docs' -Requires-Dist: sphinx-notfound-page; extra == 'docs' -Requires-Dist: sphinxcontrib-towncrier; extra == 'docs' -Requires-Dist: towncrier; extra == 'docs' -Requires-Dist: zope-interface; extra == 'docs' -Provides-Extra: tests -Requires-Dist: attrs[tests-no-zope]; extra == 'tests' -Requires-Dist: zope-interface; extra == 'tests' -Provides-Extra: tests-mypy -Requires-Dist: mypy>=1.6; (platform_python_implementation == 'CPython' and python_version >= '3.8') and extra == 'tests-mypy' -Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.8') and extra == 'tests-mypy' -Provides-Extra: tests-no-zope -Requires-Dist: attrs[tests-mypy]; extra == 'tests-no-zope' -Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'tests-no-zope' -Requires-Dist: hypothesis; extra == 'tests-no-zope' -Requires-Dist: pympler; extra == 'tests-no-zope' -Requires-Dist: pytest-xdist[psutil]; extra == 'tests-no-zope' -Requires-Dist: pytest>=4.3.0; extra == 'tests-no-zope' -Description-Content-Type: text/markdown - -

- [attrs logo]

    - - -*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). -[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! - -Its main goal is to help you to write **concise** and **correct** software without slowing down your code. - - -## Sponsors - -*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). -Especially those generously supporting us at the *The Organization* tier and higher: - -

- [sponsor logos]
-
- Please consider joining them to help make attrs’s maintenance more sustainable!

    - - - -## Example - -*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: - - - -```pycon ->>> from attrs import asdict, define, make_class, Factory - ->>> @define -... class SomeClass: -... a_number: int = 42 -... list_of_numbers: list[int] = Factory(list) -... -... def hard_math(self, another_number): -... return self.a_number + sum(self.list_of_numbers) * another_number - - ->>> sc = SomeClass(1, [1, 2, 3]) ->>> sc -SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) - ->>> sc.hard_math(3) -19 ->>> sc == SomeClass(1, [1, 2, 3]) -True ->>> sc != SomeClass(2, [3, 2, 1]) -True - ->>> asdict(sc) -{'a_number': 1, 'list_of_numbers': [1, 2, 3]} - ->>> SomeClass() -SomeClass(a_number=42, list_of_numbers=[]) - ->>> C = make_class("C", ["a", "b"]) ->>> C("foo", "bar") -C(a='foo', b='bar') -``` - -After *declaring* your attributes, *attrs* gives you: - -- a concise and explicit overview of the class's attributes, -- a nice human-readable `__repr__`, -- equality-checking methods, -- an initializer, -- and much more, - -*without* writing dull boilerplate code again and again and *without* runtime performance penalties. - -**Hate type annotations**!? -No problem! -Types are entirely **optional** with *attrs*. -Simply assign `attrs.field()` to the attributes instead of annotating them with types. - ---- - -This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. -The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. - -Please check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for a more in-depth explanation. - - -## Data Classes - -On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). -In practice it does a lot more and is more flexible. -For instance it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), and allows for stepping through the generated methods using a debugger. - -For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes). - - -## Project Information - -- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) -- [**Documentation**](https://www.attrs.org/) -- [**PyPI**](https://pypi.org/project/attrs/) -- [**Source Code**](https://github.com/python-attrs/attrs) -- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) -- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) -- **Get Help**: please use the `python-attrs` tag on [StackOverflow](https://stackoverflow.com/questions/tagged/python-attrs) - - -### *attrs* for Enterprise - -Available as part of the Tidelift Subscription. - -The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. -Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. 
-[Learn more.](https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) - -## Release Information - -### Changes - -- The type annotation for `attrs.resolve_types()` is now correct. - [#1141](https://github.com/python-attrs/attrs/issues/1141) -- Type stubs now use `typing.dataclass_transform` to decorate dataclass-like decorators, instead of the non-standard `__dataclass_transform__` special form, which is only supported by Pyright. - [#1158](https://github.com/python-attrs/attrs/issues/1158) -- Fixed serialization of namedtuple fields using `attrs.asdict/astuple()` with `retain_collection_types=True`. - [#1165](https://github.com/python-attrs/attrs/issues/1165) -- `attrs.AttrsInstance` is now a `typing.Protocol` in both type hints and code. - This allows you to subclass it along with another `Protocol`. - [#1172](https://github.com/python-attrs/attrs/issues/1172) -- If *attrs* detects that `__attrs_pre_init__` accepts more than just `self`, it will call it with the same arguments as `__init__` was called. - This allows you to, for example, pass arguments to `super().__init__()`. - [#1187](https://github.com/python-attrs/attrs/issues/1187) -- Slotted classes now transform `functools.cached_property` decorated methods to support equivalent semantics. - [#1200](https://github.com/python-attrs/attrs/issues/1200) -- Added *class_body* argument to `attrs.make_class()` to provide additional attributes for newly created classes. - It is, for example, now possible to attach methods. - [#1203](https://github.com/python-attrs/attrs/issues/1203) - - ---- - -[Full changelog](https://www.attrs.org/en/stable/changelog.html) diff --git a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/RECORD b/.venv/Lib/site-packages/attrs-23.2.0.dist-info/RECORD deleted file mode 100644 index 79ee131..0000000 --- a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/RECORD +++ /dev/null @@ -1,55 +0,0 @@ -attr/__init__.py,sha256=WlXJN6ICB0Y_HZ0lmuTUgia0kuSdn2p67d4N6cYxNZM,3307 -attr/__init__.pyi,sha256=u08EujYHy_rSyebNn-I9Xv2S_cXmtA9xWGc0cBsyl18,16976 -attr/__pycache__/__init__.cpython-311.pyc,, -attr/__pycache__/_cmp.cpython-311.pyc,, -attr/__pycache__/_compat.cpython-311.pyc,, -attr/__pycache__/_config.cpython-311.pyc,, -attr/__pycache__/_funcs.cpython-311.pyc,, -attr/__pycache__/_make.cpython-311.pyc,, -attr/__pycache__/_next_gen.cpython-311.pyc,, -attr/__pycache__/_version_info.cpython-311.pyc,, -attr/__pycache__/converters.cpython-311.pyc,, -attr/__pycache__/exceptions.cpython-311.pyc,, -attr/__pycache__/filters.cpython-311.pyc,, -attr/__pycache__/setters.cpython-311.pyc,, -attr/__pycache__/validators.cpython-311.pyc,, -attr/_cmp.py,sha256=OQZlWdFX74z18adGEUp40Ojqm0NNu1Flqnv2JE8B2ng,4025 -attr/_cmp.pyi,sha256=sGQmOM0w3_K4-X8cTXR7g0Hqr290E8PTObA9JQxWQqc,399 -attr/_compat.py,sha256=QmRyxii295wcQfaugWqxuIumAPsNQ2-RUF82QZPqMKw,2540 -attr/_config.py,sha256=z81Vt-GeT_2taxs1XZfmHx9TWlSxjPb6eZH1LTGsS54,843 -attr/_funcs.py,sha256=VBTUFKLklsmqxys3qWSTK_Ac9Z4s0mAJWwgW9nA7Llk,17173 -attr/_make.py,sha256=LnVy2e0HygoqaZknhC19z7JmOt7qGkAadf2LZgWVJWI,101923 -attr/_next_gen.py,sha256=as1voi8siAI_o2OQG8YIiZvmn0G7-S3_j_774rnoZ_g,6203 -attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 -attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 -attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 -attr/converters.py,sha256=Kyw5MY0yfnUR_RwN1Vydf0EiE---htDxOgSc_-NYL6A,3622 
-attr/converters.pyi,sha256=jKlpHBEt6HVKJvgrMFJRrHq8p61GXg4-Nd5RZWKJX7M,406 -attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977 -attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 -attr/filters.py,sha256=9pYvXqdg6mtLvKIIb56oALRMoHFnQTcGCO4EXTc1qyM,1470 -attr/filters.pyi,sha256=0mRCjLKxdcvAo0vD-Cr81HfRXXCp9j_cAXjOoAHtPGM,225 -attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400 -attr/setters.pyi,sha256=pyY8TVNBu8TWhOldv_RxHzmGvdgFQH981db70r0fn5I,567 -attr/validators.py,sha256=LGVpbiNg_KGzYrKUD5JPiZkx8TMfynDZGoQoLJNCIMo,19676 -attr/validators.pyi,sha256=167Dl9nt7NUhE9wht1I-buo039qyUT1nEUT_nKjSWr4,2580 -attrs-23.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -attrs-23.2.0.dist-info/METADATA,sha256=WwvG7OHyKjEPpyFUZCCYt1n0E_CcqdRb7bliGEdcm-A,9531 -attrs-23.2.0.dist-info/RECORD,, -attrs-23.2.0.dist-info/WHEEL,sha256=mRYSEL3Ih6g5a_CVMIcwiF__0Ae4_gLYh01YFNwiq1k,87 -attrs-23.2.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 -attrs/__init__.py,sha256=9_5waVbFs7rLqtXZ73tNDrxhezyZ8VZeX4BbvQ3EeJw,1039 -attrs/__init__.pyi,sha256=s_ajQ_U14DOsOz0JbmAKDOi46B3v2PcdO0UAV1MY6Ek,2168 -attrs/__pycache__/__init__.cpython-311.pyc,, -attrs/__pycache__/converters.cpython-311.pyc,, -attrs/__pycache__/exceptions.cpython-311.pyc,, -attrs/__pycache__/filters.cpython-311.pyc,, -attrs/__pycache__/setters.cpython-311.pyc,, -attrs/__pycache__/validators.cpython-311.pyc,, -attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76 -attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76 -attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73 -attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73 -attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76 diff --git a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/WHEEL b/.venv/Lib/site-packages/attrs-23.2.0.dist-info/WHEEL deleted file mode 100644 index 2860816..0000000 --- a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.21.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/licenses/LICENSE b/.venv/Lib/site-packages/attrs-23.2.0.dist-info/licenses/LICENSE deleted file mode 100644 index 2bd6453..0000000 --- a/.venv/Lib/site-packages/attrs-23.2.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Hynek Schlawack and the attrs contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/.venv/Lib/site-packages/attrs/__init__.py b/.venv/Lib/site-packages/attrs/__init__.py deleted file mode 100644 index 0c24815..0000000 --- a/.venv/Lib/site-packages/attrs/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr import ( - NOTHING, - Attribute, - AttrsInstance, - Factory, - _make_getattr, - assoc, - cmp_using, - define, - evolve, - field, - fields, - fields_dict, - frozen, - has, - make_class, - mutable, - resolve_types, - validate, -) -from attr._next_gen import asdict, astuple - -from . import converters, exceptions, filters, setters, validators - - -__all__ = [ - "__author__", - "__copyright__", - "__description__", - "__doc__", - "__email__", - "__license__", - "__title__", - "__url__", - "__version__", - "__version_info__", - "asdict", - "assoc", - "astuple", - "Attribute", - "AttrsInstance", - "cmp_using", - "converters", - "define", - "evolve", - "exceptions", - "Factory", - "field", - "fields_dict", - "fields", - "filters", - "frozen", - "has", - "make_class", - "mutable", - "NOTHING", - "resolve_types", - "setters", - "validate", - "validators", -] - -__getattr__ = _make_getattr(__name__) diff --git a/.venv/Lib/site-packages/attrs/__init__.pyi b/.venv/Lib/site-packages/attrs/__init__.pyi deleted file mode 100644 index 9372cfe..0000000 --- a/.venv/Lib/site-packages/attrs/__init__.pyi +++ /dev/null @@ -1,67 +0,0 @@ -from typing import ( - Any, - Callable, - Dict, - Mapping, - Optional, - Sequence, - Tuple, - Type, -) - -# Because we need to type our own stuff, we have to make everything from -# attr explicitly public too. -from attr import __author__ as __author__ -from attr import __copyright__ as __copyright__ -from attr import __description__ as __description__ -from attr import __email__ as __email__ -from attr import __license__ as __license__ -from attr import __title__ as __title__ -from attr import __url__ as __url__ -from attr import __version__ as __version__ -from attr import __version_info__ as __version_info__ -from attr import _FilterType -from attr import assoc as assoc -from attr import Attribute as Attribute -from attr import AttrsInstance as AttrsInstance -from attr import cmp_using as cmp_using -from attr import converters as converters -from attr import define as define -from attr import evolve as evolve -from attr import exceptions as exceptions -from attr import Factory as Factory -from attr import field as field -from attr import fields as fields -from attr import fields_dict as fields_dict -from attr import filters as filters -from attr import frozen as frozen -from attr import has as has -from attr import make_class as make_class -from attr import mutable as mutable -from attr import NOTHING as NOTHING -from attr import resolve_types as resolve_types -from attr import setters as setters -from attr import validate as validate -from attr import validators as validators - -# TODO: see definition of attr.asdict/astuple -def asdict( - inst: AttrsInstance, - recurse: bool = ..., - filter: Optional[_FilterType[Any]] = ..., - dict_factory: Type[Mapping[Any, Any]] = ..., - retain_collection_types: bool = ..., - value_serializer: Optional[ - Callable[[type, Attribute[Any], Any], Any] - ] = ..., - tuple_keys: bool = ..., -) -> Dict[str, Any]: ... 
- -# TODO: add support for returning NamedTuple from the mypy plugin -def astuple( - inst: AttrsInstance, - recurse: bool = ..., - filter: Optional[_FilterType[Any]] = ..., - tuple_factory: Type[Sequence[Any]] = ..., - retain_collection_types: bool = ..., -) -> Tuple[Any, ...]: ... diff --git a/.venv/Lib/site-packages/attrs/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/attrs/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 9f132d5..0000000 Binary files a/.venv/Lib/site-packages/attrs/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attrs/__pycache__/converters.cpython-311.pyc b/.venv/Lib/site-packages/attrs/__pycache__/converters.cpython-311.pyc deleted file mode 100644 index d88ea36..0000000 Binary files a/.venv/Lib/site-packages/attrs/__pycache__/converters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attrs/__pycache__/exceptions.cpython-311.pyc b/.venv/Lib/site-packages/attrs/__pycache__/exceptions.cpython-311.pyc deleted file mode 100644 index 99756ab..0000000 Binary files a/.venv/Lib/site-packages/attrs/__pycache__/exceptions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attrs/__pycache__/filters.cpython-311.pyc b/.venv/Lib/site-packages/attrs/__pycache__/filters.cpython-311.pyc deleted file mode 100644 index 4386433..0000000 Binary files a/.venv/Lib/site-packages/attrs/__pycache__/filters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attrs/__pycache__/setters.cpython-311.pyc b/.venv/Lib/site-packages/attrs/__pycache__/setters.cpython-311.pyc deleted file mode 100644 index 6616b2a..0000000 Binary files a/.venv/Lib/site-packages/attrs/__pycache__/setters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attrs/__pycache__/validators.cpython-311.pyc b/.venv/Lib/site-packages/attrs/__pycache__/validators.cpython-311.pyc deleted file mode 100644 index c356025..0000000 Binary files a/.venv/Lib/site-packages/attrs/__pycache__/validators.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/attrs/converters.py b/.venv/Lib/site-packages/attrs/converters.py deleted file mode 100644 index 7821f6c..0000000 --- a/.venv/Lib/site-packages/attrs/converters.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.converters import * # noqa: F403 diff --git a/.venv/Lib/site-packages/attrs/exceptions.py b/.venv/Lib/site-packages/attrs/exceptions.py deleted file mode 100644 index 3323f9d..0000000 --- a/.venv/Lib/site-packages/attrs/exceptions.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.exceptions import * # noqa: F403 diff --git a/.venv/Lib/site-packages/attrs/filters.py b/.venv/Lib/site-packages/attrs/filters.py deleted file mode 100644 index 3080f48..0000000 --- a/.venv/Lib/site-packages/attrs/filters.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.filters import * # noqa: F403 diff --git a/.venv/Lib/site-packages/attrs/py.typed b/.venv/Lib/site-packages/attrs/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/Lib/site-packages/attrs/setters.py b/.venv/Lib/site-packages/attrs/setters.py deleted file mode 100644 index f3d73bb..0000000 --- a/.venv/Lib/site-packages/attrs/setters.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.setters import * # noqa: F403 diff --git a/.venv/Lib/site-packages/attrs/validators.py b/.venv/Lib/site-packages/attrs/validators.py 
deleted file mode 100644 index 037e124..0000000 --- a/.venv/Lib/site-packages/attrs/validators.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-License-Identifier: MIT - -from attr.validators import * # noqa: F403 diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/INSTALLER b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/LICENSE b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/LICENSE deleted file mode 100644 index 700c21b..0000000 --- a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
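
Editor's sketch (not part of the original diff): the `attrs/*.py` files deleted above are one-line shims that re-export the `attr.*` modules, and the `attrs.asdict`/`attrs.astuple` wrappers (see the `_next_gen` helpers and the `attrs/__init__.pyi` stubs earlier in this diff) always pass `retain_collection_types=True`. A small comparison under attrs 23.2.0 semantics, with a made-up `Point` class:

```python
# Minimal sketch, assuming attrs 23.2.0; Point is a made-up example class.
import attr
import attrs


@attrs.define
class Point:
    coords: tuple[int, int]


p = Point((1, 2))

print(attr.asdict(p))   # {'coords': [1, 2]}  -- tuple rebuilt as a list by default
print(attrs.asdict(p))  # {'coords': (1, 2)}  -- collection type retained
```
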
diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/METADATA b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/METADATA deleted file mode 100644 index acbc57a..0000000 --- a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/METADATA +++ /dev/null @@ -1,166 +0,0 @@ -Metadata-Version: 2.1 -Name: discord.py -Version: 2.3.2 -Summary: A Python wrapper for the Discord API -Home-page: https://github.com/Rapptz/discord.py -Author: Rapptz -License: MIT -Project-URL: Documentation, https://discordpy.readthedocs.io/en/latest/ -Project-URL: Issue tracker, https://github.com/Rapptz/discord.py/issues -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: Internet -Classifier: Topic :: Software Development :: Libraries -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -Classifier: Typing :: Typed -Requires-Python: >=3.8.0 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: aiohttp (<4,>=3.7.4) -Provides-Extra: docs -Requires-Dist: sphinx (==4.4.0) ; extra == 'docs' -Requires-Dist: sphinxcontrib-trio (==1.1.2) ; extra == 'docs' -Requires-Dist: sphinxcontrib-websupport ; extra == 'docs' -Requires-Dist: typing-extensions (<5,>=4.3) ; extra == 'docs' -Provides-Extra: speed -Requires-Dist: orjson (>=3.5.4) ; extra == 'speed' -Requires-Dist: aiodns (>=1.1) ; extra == 'speed' -Requires-Dist: Brotli ; extra == 'speed' -Requires-Dist: cchardet (==2.1.7) ; (python_version < "3.10") and extra == 'speed' -Provides-Extra: test -Requires-Dist: coverage[toml] ; extra == 'test' -Requires-Dist: pytest ; extra == 'test' -Requires-Dist: pytest-asyncio ; extra == 'test' -Requires-Dist: pytest-cov ; extra == 'test' -Requires-Dist: pytest-mock ; extra == 'test' -Requires-Dist: typing-extensions (<5,>=4.3) ; extra == 'test' -Provides-Extra: voice -Requires-Dist: PyNaCl (<1.6,>=1.3.0) ; extra == 'voice' - -discord.py -========== - -.. image:: https://discord.com/api/guilds/336642139381301249/embed.png - :target: https://discord.gg/r3sSKJJ - :alt: Discord server invite -.. image:: https://img.shields.io/pypi/v/discord.py.svg - :target: https://pypi.python.org/pypi/discord.py - :alt: PyPI version info -.. image:: https://img.shields.io/pypi/pyversions/discord.py.svg - :target: https://pypi.python.org/pypi/discord.py - :alt: PyPI supported Python versions - -A modern, easy to use, feature-rich, and async ready API wrapper for Discord written in Python. - -Key Features -------------- - -- Modern Pythonic API using ``async`` and ``await``. -- Proper rate limit handling. -- Optimised in both speed and memory. - -Installing ----------- - -**Python 3.8 or higher is required** - -To install the library without full voice support, you can just run the following command: - -.. code:: sh - - # Linux/macOS - python3 -m pip install -U discord.py - - # Windows - py -3 -m pip install -U discord.py - -Otherwise to get voice support you should run the following command: - -.. code:: sh - - # Linux/macOS - python3 -m pip install -U "discord.py[voice]" - - # Windows - py -3 -m pip install -U discord.py[voice] - - -To install the development version, do the following: - -.. 
code:: sh - - $ git clone https://github.com/Rapptz/discord.py - $ cd discord.py - $ python3 -m pip install -U .[voice] - - -Optional Packages -~~~~~~~~~~~~~~~~~~ - -* `PyNaCl `__ (for voice support) - -Please note that when installing voice support on Linux, you must install the following packages via your favourite package manager (e.g. ``apt``, ``dnf``, etc) before running the above commands: - -* libffi-dev (or ``libffi-devel`` on some systems) -* python-dev (e.g. ``python3.8-dev`` for Python 3.8) - -Quick Example --------------- - -.. code:: py - - import discord - - class MyClient(discord.Client): - async def on_ready(self): - print('Logged on as', self.user) - - async def on_message(self, message): - # don't respond to ourselves - if message.author == self.user: - return - - if message.content == 'ping': - await message.channel.send('pong') - - intents = discord.Intents.default() - intents.message_content = True - client = MyClient(intents=intents) - client.run('token') - -Bot Example -~~~~~~~~~~~~~ - -.. code:: py - - import discord - from discord.ext import commands - - intents = discord.Intents.default() - intents.message_content = True - bot = commands.Bot(command_prefix='>', intents=intents) - - @bot.command() - async def ping(ctx): - await ctx.send('pong') - - bot.run('token') - -You can find more examples in the examples directory. - -Links ------- - -- `Documentation `_ -- `Official Discord Server `_ -- `Discord API `_ - - diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/RECORD b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/RECORD deleted file mode 100644 index 73ff080..0000000 --- a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/RECORD +++ /dev/null @@ -1,244 +0,0 @@ -discord.py-2.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -discord.py-2.3.2.dist-info/LICENSE,sha256=IRr8eHptwl13Oez9dujx-pRmN028VYOGiW2Yzf7lEn0,1081 -discord.py-2.3.2.dist-info/METADATA,sha256=3Kuv_E0jlGPe7jltcyg5MicDBvmrdNYF3OONjUgw3N8,4897 -discord.py-2.3.2.dist-info/RECORD,, -discord.py-2.3.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -discord.py-2.3.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -discord.py-2.3.2.dist-info/top_level.txt,sha256=fJkrNbR-_8ubMBUcDEJBcfkpECrvSEmMrNKgvLlQFoM,8 -discord/__init__.py,sha256=75ePASY8wcoRPB8TxKbIfi9HBdv5INqHw37UvfQJ2ZY,1886 -discord/__main__.py,sha256=DEe4CqYJGe53oxJsZAQVXNRQUEUILXpo1r5iwp5wIW8,11051 -discord/__pycache__/__init__.cpython-311.pyc,, -discord/__pycache__/__main__.cpython-311.pyc,, -discord/__pycache__/_types.cpython-311.pyc,, -discord/__pycache__/abc.cpython-311.pyc,, -discord/__pycache__/activity.cpython-311.pyc,, -discord/__pycache__/appinfo.cpython-311.pyc,, -discord/__pycache__/asset.cpython-311.pyc,, -discord/__pycache__/audit_logs.cpython-311.pyc,, -discord/__pycache__/automod.cpython-311.pyc,, -discord/__pycache__/backoff.cpython-311.pyc,, -discord/__pycache__/channel.cpython-311.pyc,, -discord/__pycache__/client.cpython-311.pyc,, -discord/__pycache__/colour.cpython-311.pyc,, -discord/__pycache__/components.cpython-311.pyc,, -discord/__pycache__/context_managers.cpython-311.pyc,, -discord/__pycache__/embeds.cpython-311.pyc,, -discord/__pycache__/emoji.cpython-311.pyc,, -discord/__pycache__/enums.cpython-311.pyc,, -discord/__pycache__/errors.cpython-311.pyc,, -discord/__pycache__/file.cpython-311.pyc,, -discord/__pycache__/flags.cpython-311.pyc,, -discord/__pycache__/gateway.cpython-311.pyc,, 
-discord/__pycache__/guild.cpython-311.pyc,, -discord/__pycache__/http.cpython-311.pyc,, -discord/__pycache__/integrations.cpython-311.pyc,, -discord/__pycache__/interactions.cpython-311.pyc,, -discord/__pycache__/invite.cpython-311.pyc,, -discord/__pycache__/member.cpython-311.pyc,, -discord/__pycache__/mentions.cpython-311.pyc,, -discord/__pycache__/message.cpython-311.pyc,, -discord/__pycache__/mixins.cpython-311.pyc,, -discord/__pycache__/object.cpython-311.pyc,, -discord/__pycache__/oggparse.cpython-311.pyc,, -discord/__pycache__/opus.cpython-311.pyc,, -discord/__pycache__/partial_emoji.cpython-311.pyc,, -discord/__pycache__/permissions.cpython-311.pyc,, -discord/__pycache__/player.cpython-311.pyc,, -discord/__pycache__/raw_models.cpython-311.pyc,, -discord/__pycache__/reaction.cpython-311.pyc,, -discord/__pycache__/role.cpython-311.pyc,, -discord/__pycache__/scheduled_event.cpython-311.pyc,, -discord/__pycache__/shard.cpython-311.pyc,, -discord/__pycache__/stage_instance.cpython-311.pyc,, -discord/__pycache__/state.cpython-311.pyc,, -discord/__pycache__/sticker.cpython-311.pyc,, -discord/__pycache__/team.cpython-311.pyc,, -discord/__pycache__/template.cpython-311.pyc,, -discord/__pycache__/threads.cpython-311.pyc,, -discord/__pycache__/user.cpython-311.pyc,, -discord/__pycache__/utils.cpython-311.pyc,, -discord/__pycache__/voice_client.cpython-311.pyc,, -discord/__pycache__/welcome_screen.cpython-311.pyc,, -discord/__pycache__/widget.cpython-311.pyc,, -discord/_types.py,sha256=b6Ij97rnyo9WGs3qVFyRQ210LsIApH7Jr3-ZfmxU268,1410 -discord/abc.py,sha256=zhtYcxn2JfG72MT_d0kdVQiWQe_8UxXnO5uSJy4H1U0,65822 -discord/activity.py,sha256=2q-Kah0_BMOrwQ3q4_8OxACu_1gPh719T9g4eKEaCPs,26864 -discord/app_commands/__init__.py,sha256=xaANFF28sifxXO__x6eSd7r5r7UjpO-tMQirjaKiNSc,424 -discord/app_commands/__pycache__/__init__.cpython-311.pyc,, -discord/app_commands/__pycache__/checks.cpython-311.pyc,, -discord/app_commands/__pycache__/commands.cpython-311.pyc,, -discord/app_commands/__pycache__/errors.cpython-311.pyc,, -discord/app_commands/__pycache__/models.cpython-311.pyc,, -discord/app_commands/__pycache__/namespace.cpython-311.pyc,, -discord/app_commands/__pycache__/transformers.cpython-311.pyc,, -discord/app_commands/__pycache__/translator.cpython-311.pyc,, -discord/app_commands/__pycache__/tree.cpython-311.pyc,, -discord/app_commands/checks.py,sha256=Z0OIrHuoYHu3eE2q05luQ_MQGrqSC5XhA0rourJ9tFs,18077 -discord/app_commands/commands.py,sha256=NR0z9bKQUvyGr55jNGWoCJhqTJweXmK3iNlNoJ7YW_A,94536 -discord/app_commands/errors.py,sha256=yU3Rb77UoKjQTInnTtPlq6osBnoJLK3L_FrcnAD3nR4,19006 -discord/app_commands/models.py,sha256=blPfJeLmez-1Le_maQ1SiGd79KfNMF0ooRPAW1zsfFQ,38502 -discord/app_commands/namespace.py,sha256=oNfmQPNDbfW3uTGadOOw8F_X872rX4-qNFYR8ZhDAKw,13123 -discord/app_commands/transformers.py,sha256=8iP3ApdisRXfiwAhEwS0kwygeFn-sVrtgA-VgEwS_7U,32512 -discord/app_commands/translator.py,sha256=m9ENDO7GGCJXiSuhCCvXi1jW4ccHOBEM0Hk9qomS6I0,10686 -discord/app_commands/tree.py,sha256=vhwRreDEPqr7otd_NVz06c0cidW15_RXkBlI-zkCisg,47965 -discord/appinfo.py,sha256=4Y-XqFxOuB_roDceQcUNRs0f3BogaHyOuOtmmlB2MTg,12713 -discord/asset.py,sha256=SuZBQ9mgz092zUpXem2gco0WOOAOiW9pJpWpbfJtsUI,15837 -discord/audit_logs.py,sha256=pFGUg9oKneMHBZ1CJY6cewWRKXpgZ72WcmMTyk1oGgs,35367 -discord/automod.py,sha256=bMK9tloCA0iU0CSKpPwVikoi00erIyxBcQJnZLyq_r0,23833 -discord/backoff.py,sha256=3yQ0uJbQ3ij7Tmdzv7GdhqfHjbNVO4eTB3Eu0VCxuvM,3751 -discord/bin/libopus-0.x64.dll,sha256=yE2oNujZJCGsMFhCz-WnJImO0J00DUaxKeIQvclEgTE,441856 
-discord/bin/libopus-0.x86.dll,sha256=O1v-EpUPNQQ-110rb6kCyTbWelBxYL92NY1nx2wdveg,366080 -discord/channel.py,sha256=xmb1oJ4i6AwIiiT1HmEfAkNq0LagyUK8pRgeMkgcLpA,116803 -discord/client.py,sha256=hhT8HCdxmM7EkJOIjsdNs_5gsf7A0HPw2AEt9bDHDK4,85400 -discord/colour.py,sha256=l8-TuIYTNwaDGdIRLwjHNHsMjqjNl_pJ-RscHkXJcDM,14403 -discord/components.py,sha256=1koyIHyOVLKdsd3trSYyPAQ37zOn7IMCbFBsbOu4mz4,16850 -discord/context_managers.py,sha256=hloaEAAhLcDB-QfyoCPEpH_vsc3y4WLs1Ujs352tLbQ,3032 -discord/embeds.py,sha256=sijnFNm84KCoo_IUPUUC1bzX1IWyMfPcBPFXFocq50g,22722 -discord/emoji.py,sha256=ucJuzNIfAbvDBf8QxFrD4RyVwxDGCRyl7Zr_QWI5ZRo,8574 -discord/enums.py,sha256=gErFkTYnvfwE-EaOFQfMT9kAZhCLY4-Hj3Ut-geNpfQ,22242 -discord/errors.py,sha256=h0BHhp-UjoWTvoxqlLjtOziM9nxVFR8qMHZmEt8m5oc,8952 -discord/ext/commands/__init__.py,sha256=ZQPvApylgqC07qrj80DDkT4Dbd7j_OVy5Xw4RiYZJRc,437 -discord/ext/commands/__pycache__/__init__.cpython-311.pyc,, -discord/ext/commands/__pycache__/_types.cpython-311.pyc,, -discord/ext/commands/__pycache__/bot.cpython-311.pyc,, -discord/ext/commands/__pycache__/cog.cpython-311.pyc,, -discord/ext/commands/__pycache__/context.cpython-311.pyc,, -discord/ext/commands/__pycache__/converter.cpython-311.pyc,, -discord/ext/commands/__pycache__/cooldowns.cpython-311.pyc,, -discord/ext/commands/__pycache__/core.cpython-311.pyc,, -discord/ext/commands/__pycache__/errors.cpython-311.pyc,, -discord/ext/commands/__pycache__/flags.cpython-311.pyc,, -discord/ext/commands/__pycache__/help.cpython-311.pyc,, -discord/ext/commands/__pycache__/hybrid.cpython-311.pyc,, -discord/ext/commands/__pycache__/parameters.cpython-311.pyc,, -discord/ext/commands/__pycache__/view.cpython-311.pyc,, -discord/ext/commands/_types.py,sha256=ULLyGU6nLcITfnS-yTrlRU0N3e0zXETo2w8dBWKMrf4,2638 -discord/ext/commands/bot.py,sha256=3u57VTED60mwKrRmd8eH5GogWZLMfNA90qKh6oPhtHA,51719 -discord/ext/commands/cog.py,sha256=54f5j_I-fU30zFY-cxin_x3sHfsSjntm_w_arbhsiXI,30245 -discord/ext/commands/context.py,sha256=2cJRDhyncu1NIURGumt0oo3GMN64EMoqf2JWE_j5FzA,40048 -discord/ext/commands/converter.py,sha256=lJIxLDXxnwVHdAQfuM3JROgxplytPGedW9LtFRGXlSc,46172 -discord/ext/commands/cooldowns.py,sha256=0TrIBTDYLz2PhVx7yE9RDNoCENFRlLDEw1e8nvabm1k,9716 -discord/ext/commands/core.py,sha256=KMPSoicVLfCoX9771BbHgYqOVzaSUKQMk9gUfQBobPY,89619 -discord/ext/commands/errors.py,sha256=ZQJ2slp3HVUH3aWvOU1iLLVxpxA1LsjQOjKTj9mdmEQ,36450 -discord/ext/commands/flags.py,sha256=1LaqI9z_lnxl380W1THHg-tk7ta0ht0_gPWPFmdFDjY,22966 -discord/ext/commands/help.py,sha256=Qnfby8ZQ6RFKP31KgP4gk2eq0e8sSbNwd548sAdQPHI,57873 -discord/ext/commands/hybrid.py,sha256=0FmvT7xovJJt5tEknVelZ5Dxi3iu9A3CqDMftBCNV30,36595 -discord/ext/commands/parameters.py,sha256=3OUpzbljB6GLazvW51Rq1XlVbEGiWC-Bsfjk3_RW4-4,9295 -discord/ext/commands/view.py,sha256=lwRVmdXEkTIyzLLacr7Wz6jFY0s7KEbzuFgzx8Wgjog,6247 -discord/ext/tasks/__init__.py,sha256=R33gptpvC616c9c8L52fKDTl73w4_2YNWQvMolyPCcM,29180 -discord/ext/tasks/__pycache__/__init__.cpython-311.pyc,, -discord/file.py,sha256=YeNS1cUZlcnMnx6QmPjpT7wT7h8hXOa11T1hz8cpr0Y,5378 -discord/flags.py,sha256=rRefO1GRBhvW0_lDwxwlSjfe9VwpYq8j_z9mjk8EKRE,54264 -discord/gateway.py,sha256=G_ngUX5CiWU5JBPDElRf4yLHxKgmzKNHghbasvK0L3Q,35191 -discord/guild.py,sha256=bBGGP64iI_aw0_EP61VkLQcsNsoJqIUNXQJGl50LL30,150337 -discord/http.py,sha256=PI68vFC3ez3p8b_hJjShJWejLjQ8-OTPF5nOqdlalU8,90056 -discord/integrations.py,sha256=P2s9NjyWhlcdQs3IcRrVdr6wHmCVszwXSlhXNnU_CEk,13334 -discord/interactions.py,sha256=OIRG_5XceVKl52OtiICzI0StWmkUT_pvdQLEvVoNTFs,45246 
-discord/invite.py,sha256=fRK2UbCwAoLY8mTCCThZOfsaCbMC6yM3of2dswXG9w0,20595 -discord/member.py,sha256=xqdEgNuaaTTWPBO9e7YARxKMxleVNvhLtyRMKnwKGE0,41018 -discord/mentions.py,sha256=2DZE_Uh2sDIoext6-bAXkdyWAjRVSAV3GrMYxKFIL14,5592 -discord/message.py,sha256=LZv2wc_XjrYQuD7wThnffnvLlrYmPBTSl4JXRwm-QBg,85375 -discord/mixins.py,sha256=_mKBOfdhQKNwlVx2m4jbJmOvtWxkuWSBEbnhxNDd4qo,1485 -discord/object.py,sha256=KHaw4UBOgcw74m-uXI9uU3mba2wAIgEKeaP5n2uuKsQ,3702 -discord/oggparse.py,sha256=F_wjGRC2TWeq0tMltNukivWSP-YUdP8WAIOy-oVqNmU,3646 -discord/opus.py,sha256=zfeQfMAvH7zRu6Msu42Thy9PMtLm8IdIqQk3ygTAtJg,15233 -discord/partial_emoji.py,sha256=OtsS-zYNyk2bv-ZepZ4z4RSAxZ40d8eQDsiob6_-Om0,7954 -discord/permissions.py,sha256=9LMdh0u5t-j1FWLMf1BcZmBndkZHwY2rnZqNmH8jBrc,30153 -discord/player.py,sha256=RNDcaWGl0_4rQvTCQSSQmnMwdTjQMfFBQ2kfD4Pief0,26498 -discord/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -discord/raw_models.py,sha256=9aCXMFEbBiEXUeJ5u1Gkrk2WH0MFn3DI-NE2mKHqmSM,16826 -discord/reaction.py,sha256=5LEcjk3i33sdBCF6l54wF90473OyjuNU7SBBzJA2tE0,8207 -discord/role.py,sha256=xyzYK1VJIrP_LAcem68XqOylfp8ZaCsrsZOBbKEO_eI,17906 -discord/scheduled_event.py,sha256=rCduKyK3HaEGxGXM6VJYvig52VQsy6ReWqIFuOe8Ej8,23629 -discord/shard.py,sha256=LV60FOxFm6-_EN0fLDgjGZPBpavO6c8YN2PyPrGtfVI,20399 -discord/stage_instance.py,sha256=TBiXfIoaMKemDZLj_Fy7glDo4mnrtJu735O5catVM7c,6498 -discord/state.py,sha256=9UDn_zIXMycsvBMKLfhPJkmn66mZ0e_1QnMlLaBVwnE,73376 -discord/sticker.py,sha256=RiqwVAU5cfEAk87_zJozoWJlE1_SJU1cl9mQKexr8jE,16039 -discord/team.py,sha256=Gj6mL0d1jIuO3dX3JMQjyyPzelPuMgbYP7wvCOkVx3E,4792 -discord/template.py,sha256=hlxSwq35_wZ8anANs4aStCzDdQNMl0QE0qZA4bhr6YU,9574 -discord/threads.py,sha256=-SA_j8g0P4tRDq3Z15HX2-sjSeVlXKGzLD3SOv_4k8Q,32456 -discord/types/__init__.py,sha256=7kT6hLaDiMVwuJvp4Os08kxqu9bxX3Yr9FFcOGYd6YQ,149 -discord/types/__pycache__/__init__.cpython-311.pyc,, -discord/types/__pycache__/activity.cpython-311.pyc,, -discord/types/__pycache__/appinfo.cpython-311.pyc,, -discord/types/__pycache__/audit_log.cpython-311.pyc,, -discord/types/__pycache__/automod.cpython-311.pyc,, -discord/types/__pycache__/channel.cpython-311.pyc,, -discord/types/__pycache__/command.cpython-311.pyc,, -discord/types/__pycache__/components.cpython-311.pyc,, -discord/types/__pycache__/embed.cpython-311.pyc,, -discord/types/__pycache__/emoji.cpython-311.pyc,, -discord/types/__pycache__/gateway.cpython-311.pyc,, -discord/types/__pycache__/guild.cpython-311.pyc,, -discord/types/__pycache__/integration.cpython-311.pyc,, -discord/types/__pycache__/interactions.cpython-311.pyc,, -discord/types/__pycache__/invite.cpython-311.pyc,, -discord/types/__pycache__/member.cpython-311.pyc,, -discord/types/__pycache__/message.cpython-311.pyc,, -discord/types/__pycache__/role.cpython-311.pyc,, -discord/types/__pycache__/scheduled_event.cpython-311.pyc,, -discord/types/__pycache__/snowflake.cpython-311.pyc,, -discord/types/__pycache__/sticker.cpython-311.pyc,, -discord/types/__pycache__/team.cpython-311.pyc,, -discord/types/__pycache__/template.cpython-311.pyc,, -discord/types/__pycache__/threads.cpython-311.pyc,, -discord/types/__pycache__/user.cpython-311.pyc,, -discord/types/__pycache__/voice.cpython-311.pyc,, -discord/types/__pycache__/webhook.cpython-311.pyc,, -discord/types/__pycache__/welcome_screen.cpython-311.pyc,, -discord/types/__pycache__/widget.cpython-311.pyc,, -discord/types/activity.py,sha256=k7tqp11m_t77Lu9D5T8pAMw1dnQcs7CtLJ5NV-4CgzQ,2707 
-discord/types/appinfo.py,sha256=e0jA4OUxkjK439-tShxZWzqr1Ihx8Nz6ULhGYYEcb6k,2487 -discord/types/audit_log.py,sha256=cD8CEbC7bwBYJdrBSU6JuHo6_9sW6l7FQ4ZnGdraZLg,8192 -discord/types/automod.py,sha256=JxdWKf2evnFOuObcxnIi0QKzGpPOWsmKEx1nRJOwvQs,4009 -discord/types/channel.py,sha256=LHHLGBQpDju1043xDFjxhS5shBdnLnyQhNhxZk9PiVs,4804 -discord/types/command.py,sha256=1hcNoTUcJmpQy29ILcevQSxT8X2nxCngwubshVdgtyI,6266 -discord/types/components.py,sha256=b27cICDVSxXxHzadK7r8sipDn4fI5UCln2jPs33XKe0,3057 -discord/types/embed.py,sha256=VnYSZqjPIYXAGA87P-JhjXs_mGhswaMpOd81mzF5iPc,2329 -discord/types/emoji.py,sha256=JfwRzhcs7KRLKVtl8bCrvbgBTl9Ww8MboNoUcEf2EkM,1528 -discord/types/gateway.py,sha256=So8DWqEqVkqrlhbTSHrYTOcyfUNjRx3CiLVn2sR9yZ4,8453 -discord/types/guild.py,sha256=KikAryWEXz_cqhw1ZcmM-Hg7UytI1VgPswPRLEpQSB0,5279 -discord/types/integration.py,sha256=8HU48LNhr4eqXBzOkib3a5zEfxg1rvOTWp1IeERNpnw,2288 -discord/types/interactions.py,sha256=0VvwucoupXmCD0QGU1tBAT1xzlXcaMSt38h5_HG3gnQ,7046 -discord/types/invite.py,sha256=sLmvX0Imw5qtV-h78BYVf928Q0YIfGWVuz9XxY8Ctqg,2704 -discord/types/member.py,sha256=vOttPlR_PRpVKlQNnilcWjNEIAJBymVC6N0w08WZ248,1898 -discord/types/message.py,sha256=gCd0Hf9z-mWaxzzVJTn3ZITq2p3KKwgYRE9FYUbC9fY,4251 -discord/types/role.py,sha256=m1ZNSq4n-SNSWLELmBV2hE70Bo5YWuaovfLLhXBhL6U,1746 -discord/types/scheduled_event.py,sha256=WwSiBISWsOD81CVxBc2dwfnLSCaRPsZUdYFWyW8ouJI,3294 -discord/types/snowflake.py,sha256=x_L3OXauewQagDr6jPzj7uCw1MNqijsy-Uo9vvkwvF0,1182 -discord/types/sticker.py,sha256=WXQH2KaEAe0dK1tn0p0ZwxXV0OfWuxDGBEMQdBu0kr4,2257 -discord/types/team.py,sha256=rOgj3_h4UGMTSx9dgyBrYTX-1MQZdvPqpJgZKtnOOF4,1499 -discord/types/template.py,sha256=EO4tA2WypntCGQ9sWud5VS_uI8n12iVVHVPcNgCtZvM,1609 -discord/types/threads.py,sha256=p2VuQCd8NbAfIV3FUsG4IwHsd1uXk-rWQCfP22RQqNo,2454 -discord/types/user.py,sha256=eGW7Au7gGBbsQwod9ADXUdePArfZOgCM7kTTBf6rrkE,1572 -discord/types/voice.py,sha256=wFQLiPZaQuXBKE1dM_bdWNpkglpGFC7kEtghwCHBDQM,2268 -discord/types/webhook.py,sha256=iqBUgbz38o0_mWp-gcVI0_urpgZ_dq2EfOOXACg47u8,1978 -discord/types/welcome_screen.py,sha256=ukQMefZLHpgyg0_5IoAzN0UX40VayhF2vnJzNJNMZzA,1460 -discord/types/widget.py,sha256=YRkhDoBHbCEtkmYTVOSoHM8Y3mH9GIdowthoz8ISJYU,1883 -discord/ui/__init__.py,sha256=zsTlMLSiy6RzXzNVl1i5WDiM5piE_bgzCTLYU93axYM,285 -discord/ui/__pycache__/__init__.cpython-311.pyc,, -discord/ui/__pycache__/button.cpython-311.pyc,, -discord/ui/__pycache__/dynamic.cpython-311.pyc,, -discord/ui/__pycache__/item.cpython-311.pyc,, -discord/ui/__pycache__/modal.cpython-311.pyc,, -discord/ui/__pycache__/select.cpython-311.pyc,, -discord/ui/__pycache__/text_input.cpython-311.pyc,, -discord/ui/__pycache__/view.cpython-311.pyc,, -discord/ui/button.py,sha256=_5F4oEYfFwLcXeY056nZ_gp68zyPRDWgWh17klpos_E,10620 -discord/ui/dynamic.py,sha256=iWQ9-6JVT8HBdBY7utlJqK8fzC2BPzlgo_mE0TEA4VI,5899 -discord/ui/item.py,sha256=4Y-XTeYWsWrwriU1ihpVxVUOiCGTIgcnPP8wkjLdqCs,4372 -discord/ui/modal.py,sha256=VeziCgKCQTg8mei5cFlpv_5yoDyozKR2Cb4OoSZqzF4,7035 -discord/ui/select.py,sha256=yrjB41sDFXQEotsyXwqM1vASf7KF8qs7RIEYuDc0_d8,34030 -discord/ui/text_input.py,sha256=6vqvxCdDfMWoDNre-F69LeeoSugdL72x8jtOF8hW3Vc,8092 -discord/ui/view.py,sha256=KxJHGKb76_7mgfIfLhysPfQkRWSb_mUnAckXI3E-mFE,22878 -discord/user.py,sha256=ZZfKq97mG_w0Hf8Sli2E9zoUGawKdi32GC2aLhvxAbo,16504 -discord/utils.py,sha256=mWXsJSN3itR80f2BFtpwzgl_CuU2lG40CHrK89Ci5sI,41506 -discord/voice_client.py,sha256=dYywhW7CAztq0dnnUS3bG3OebUwLVTbndWEggnAUTLo,25089 
-discord/webhook/__init__.py,sha256=5lx7IcCFf9DAjdX7CVen3-8DjHAfGvS1rSDY3DyVnqM,182 -discord/webhook/__pycache__/__init__.cpython-311.pyc,, -discord/webhook/__pycache__/async_.cpython-311.pyc,, -discord/webhook/__pycache__/sync.cpython-311.pyc,, -discord/webhook/async_.py,sha256=9NaW7OY2poMGZAja0CnQCdYlQEKagW0zraEPDTi7vH0,69729 -discord/webhook/sync.py,sha256=1sfj1QRohdA1NFBE7hMvfN0b38i0YXijzUHeNdfuQoE,42586 -discord/welcome_screen.py,sha256=CP5i3eujqzt_yVs0IBrTH_MOzTlEG0iid9xBvMvDQYk,7539 -discord/widget.py,sha256=FLqKcOyPRE6qbqgFEbhz0RJvl4DG7Ui7zzDnlp5SapI,10426 diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/REQUESTED b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/WHEEL b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/top_level.txt b/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/top_level.txt deleted file mode 100644 index e46fba2..0000000 --- a/.venv/Lib/site-packages/discord.py-2.3.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -discord diff --git a/.venv/Lib/site-packages/discord/__init__.py b/.venv/Lib/site-packages/discord/__init__.py deleted file mode 100644 index 7e927f4..0000000 --- a/.venv/Lib/site-packages/discord/__init__.py +++ /dev/null @@ -1,85 +0,0 @@ -""" -Discord API Wrapper -~~~~~~~~~~~~~~~~~~~ - -A basic wrapper for the Discord API. - -:copyright: (c) 2015-present Rapptz -:license: MIT, see LICENSE for more details. - -""" - -__title__ = 'discord' -__author__ = 'Rapptz' -__license__ = 'MIT' -__copyright__ = 'Copyright 2015-present Rapptz' -__version__ = '2.3.2' - -__path__ = __import__('pkgutil').extend_path(__path__, __name__) - -import logging -from typing import NamedTuple, Literal - -from .client import * -from .appinfo import * -from .user import * -from .emoji import * -from .partial_emoji import * -from .activity import * -from .channel import * -from .guild import * -from .flags import * -from .member import * -from .message import * -from .asset import * -from .errors import * -from .permissions import * -from .role import * -from .file import * -from .colour import * -from .integrations import * -from .invite import * -from .template import * -from .welcome_screen import * -from .widget import * -from .object import * -from .reaction import * -from . 
import ( - utils as utils, - opus as opus, - abc as abc, - ui as ui, - app_commands as app_commands, -) -from .enums import * -from .embeds import * -from .mentions import * -from .shard import * -from .player import * -from .webhook import * -from .voice_client import * -from .audit_logs import * -from .raw_models import * -from .team import * -from .sticker import * -from .stage_instance import * -from .scheduled_event import * -from .interactions import * -from .components import * -from .threads import * -from .automod import * - - -class VersionInfo(NamedTuple): - major: int - minor: int - micro: int - releaselevel: Literal["alpha", "beta", "candidate", "final"] - serial: int - - -version_info: VersionInfo = VersionInfo(major=2, minor=3, micro=2, releaselevel='final', serial=0) - -logging.getLogger(__name__).addHandler(logging.NullHandler()) - -del logging, NamedTuple, Literal, VersionInfo diff --git a/.venv/Lib/site-packages/discord/__main__.py b/.venv/Lib/site-packages/discord/__main__.py deleted file mode 100644 index 6e34be5..0000000 --- a/.venv/Lib/site-packages/discord/__main__.py +++ /dev/null @@ -1,351 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations - -from typing import Optional, Tuple, Dict - -import argparse -import sys -from pathlib import Path - -import discord -import importlib.metadata -import aiohttp -import platform - - -def show_version() -> None: - entries = [] - - entries.append('- Python v{0.major}.{0.minor}.{0.micro}-{0.releaselevel}'.format(sys.version_info)) - version_info = discord.version_info - entries.append('- discord.py v{0.major}.{0.minor}.{0.micro}-{0.releaselevel}'.format(version_info)) - if version_info.releaselevel != 'final': - version = importlib.metadata.version('discord.py') - if version: - entries.append(f' - discord.py metadata: v{version}') - - entries.append(f'- aiohttp v{aiohttp.__version__}') - uname = platform.uname() - entries.append('- system info: {0.system} {0.release} {0.version}'.format(uname)) - print('\n'.join(entries)) - - -def core(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None: - if args.version: - show_version() - else: - parser.print_help() - - -_bot_template = """#!/usr/bin/env python3 - -from discord.ext import commands -import discord -import config - -class Bot(commands.{base}): - def __init__(self, intents: discord.Intents, **kwargs): - super().__init__(command_prefix=commands.when_mentioned_or('{prefix}'), intents=intents, **kwargs) - - async def setup_hook(self): - for cog in config.cogs: - try: - await self.load_extension(cog) - except Exception as exc: - print(f'Could not load extension {{cog}} due to {{exc.__class__.__name__}}: {{exc}}') - - async def on_ready(self): - print(f'Logged on as {{self.user}} (ID: {{self.user.id}})') - - -intents = discord.Intents.default() -intents.message_content = True -bot = Bot(intents=intents) - -# write general commands here - -bot.run(config.token) -""" - -_gitignore_template = """# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Our configuration files -config.py -""" - -_cog_template = '''from discord.ext import commands -import discord - -class {name}(commands.Cog{attrs}): - """The description for {name} goes here.""" - - def __init__(self, bot): - self.bot = bot -{extra} -async def setup(bot): - await bot.add_cog({name}(bot)) -''' - -_cog_extras = ''' - async def cog_load(self): - # loading logic goes here - pass - - async def cog_unload(self): - # clean up logic goes here - pass - - async def cog_check(self, ctx): - # checks that apply to every command in here - return True - - async def bot_check(self, ctx): - # checks that apply to every command to the bot - return True - - async def bot_check_once(self, ctx): - # check that apply to every command but is guaranteed to be called only once - return True - - async def cog_command_error(self, ctx, error): - # error handling to every command in here - pass - - async def cog_app_command_error(self, interaction, error): - # error handling to every application command in here - pass - - async def cog_before_invoke(self, ctx): - # called before a command is called here - pass - - async def cog_after_invoke(self, ctx): - # called after a command is called here - pass - -''' - - -# certain file names and directory names are forbidden -# see: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx -# although some of this doesn't apply to Linux, we might as well be consistent 
-_base_table: Dict[str, Optional[str]] = { - '<': '-', - '>': '-', - ':': '-', - '"': '-', - # '/': '-', these are fine - # '\\': '-', - '|': '-', - '?': '-', - '*': '-', -} - -# NUL (0) and 1-31 are disallowed -_base_table.update((chr(i), None) for i in range(32)) - -_translation_table = str.maketrans(_base_table) - - -def to_path(parser: argparse.ArgumentParser, name: str, *, replace_spaces: bool = False) -> Path: - if isinstance(name, Path): - return name - - if sys.platform == 'win32': - forbidden = ( - 'CON', - 'PRN', - 'AUX', - 'NUL', - 'COM1', - 'COM2', - 'COM3', - 'COM4', - 'COM5', - 'COM6', - 'COM7', - 'COM8', - 'COM9', - 'LPT1', - 'LPT2', - 'LPT3', - 'LPT4', - 'LPT5', - 'LPT6', - 'LPT7', - 'LPT8', - 'LPT9', - ) - if len(name) <= 4 and name.upper() in forbidden: - parser.error('invalid directory name given, use a different one') - - name = name.translate(_translation_table) - if replace_spaces: - name = name.replace(' ', '-') - return Path(name) - - -def newbot(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None: - new_directory = to_path(parser, args.directory) / to_path(parser, args.name) - - # as a note exist_ok for Path is a 3.5+ only feature - # since we already checked above that we're >3.5 - try: - new_directory.mkdir(exist_ok=True, parents=True) - except OSError as exc: - parser.error(f'could not create our bot directory ({exc})') - - cogs = new_directory / 'cogs' - - try: - cogs.mkdir(exist_ok=True) - init = cogs / '__init__.py' - init.touch() - except OSError as exc: - print(f'warning: could not create cogs directory ({exc})') - - try: - with open(str(new_directory / 'config.py'), 'w', encoding='utf-8') as fp: - fp.write('token = "place your token here"\ncogs = []\n') - except OSError as exc: - parser.error(f'could not create config file ({exc})') - - try: - with open(str(new_directory / 'bot.py'), 'w', encoding='utf-8') as fp: - base = 'Bot' if not args.sharded else 'AutoShardedBot' - fp.write(_bot_template.format(base=base, prefix=args.prefix)) - except OSError as exc: - parser.error(f'could not create bot file ({exc})') - - if not args.no_git: - try: - with open(str(new_directory / '.gitignore'), 'w', encoding='utf-8') as fp: - fp.write(_gitignore_template) - except OSError as exc: - print(f'warning: could not create .gitignore file ({exc})') - - print('successfully made bot at', new_directory) - - -def newcog(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None: - cog_dir = to_path(parser, args.directory) - try: - cog_dir.mkdir(exist_ok=True) - except OSError as exc: - print(f'warning: could not create cogs directory ({exc})') - - directory = cog_dir / to_path(parser, args.name) - directory = directory.with_suffix('.py') - try: - with open(str(directory), 'w', encoding='utf-8') as fp: - attrs = '' - extra = _cog_extras if args.full else '' - if args.class_name: - name = args.class_name - else: - name = str(directory.stem) - if '-' in name or '_' in name: - translation = str.maketrans('-_', ' ') - name = name.translate(translation).title().replace(' ', '') - else: - name = name.title() - - if args.display_name: - attrs += f', name="{args.display_name}"' - if args.hide_commands: - attrs += ', command_attrs=dict(hidden=True)' - fp.write(_cog_template.format(name=name, extra=extra, attrs=attrs)) - except OSError as exc: - parser.error(f'could not create cog file ({exc})') - else: - print('successfully made cog at', directory) - - -def add_newbot_args(subparser: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: - parser = 
subparser.add_parser('newbot', help='creates a command bot project quickly') - parser.set_defaults(func=newbot) - - parser.add_argument('name', help='the bot project name') - parser.add_argument('directory', help='the directory to place it in (default: .)', nargs='?', default=Path.cwd()) - parser.add_argument('--prefix', help='the bot prefix (default: $)', default='$', metavar='') - parser.add_argument('--sharded', help='whether to use AutoShardedBot', action='store_true') - parser.add_argument('--no-git', help='do not create a .gitignore file', action='store_true', dest='no_git') - - -def add_newcog_args(subparser: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: - parser = subparser.add_parser('newcog', help='creates a new cog template quickly') - parser.set_defaults(func=newcog) - - parser.add_argument('name', help='the cog name') - parser.add_argument('directory', help='the directory to place it in (default: cogs)', nargs='?', default=Path('cogs')) - parser.add_argument('--class-name', help='the class name of the cog (default: )', dest='class_name') - parser.add_argument('--display-name', help='the cog name (default: )') - parser.add_argument('--hide-commands', help='whether to hide all commands in the cog', action='store_true') - parser.add_argument('--full', help='add all special methods as well', action='store_true') - - -def parse_args() -> Tuple[argparse.ArgumentParser, argparse.Namespace]: - parser = argparse.ArgumentParser(prog='discord', description='Tools for helping with discord.py') - parser.add_argument('-v', '--version', action='store_true', help='shows the library version') - parser.set_defaults(func=core) - - subparser = parser.add_subparsers(dest='subcommand', title='subcommands') - add_newbot_args(subparser) - add_newcog_args(subparser) - return parser, parser.parse_args() - - -def main() -> None: - parser, args = parse_args() - args.func(parser, args) - - -if __name__ == '__main__': - main() diff --git a/.venv/Lib/site-packages/discord/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 073d13e..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/__main__.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/__main__.cpython-311.pyc deleted file mode 100644 index d8ecb27..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/__main__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/_types.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/_types.cpython-311.pyc deleted file mode 100644 index 490fd54..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/_types.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/abc.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/abc.cpython-311.pyc deleted file mode 100644 index 389cb54..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/abc.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/activity.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/activity.cpython-311.pyc deleted file mode 100644 index f9f7244..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/activity.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/appinfo.cpython-311.pyc 
b/.venv/Lib/site-packages/discord/__pycache__/appinfo.cpython-311.pyc deleted file mode 100644 index 2d48d8d..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/appinfo.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/asset.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/asset.cpython-311.pyc deleted file mode 100644 index 16bcad2..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/asset.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/audit_logs.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/audit_logs.cpython-311.pyc deleted file mode 100644 index 4b73524..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/audit_logs.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/automod.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/automod.cpython-311.pyc deleted file mode 100644 index 7ac28c5..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/automod.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/backoff.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/backoff.cpython-311.pyc deleted file mode 100644 index 94a5a0f..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/backoff.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/channel.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/channel.cpython-311.pyc deleted file mode 100644 index a570941..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/channel.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/client.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/client.cpython-311.pyc deleted file mode 100644 index 48dc2bc..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/client.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/colour.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/colour.cpython-311.pyc deleted file mode 100644 index 9b8fe0f..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/colour.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/components.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/components.cpython-311.pyc deleted file mode 100644 index 63e4814..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/components.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/context_managers.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/context_managers.cpython-311.pyc deleted file mode 100644 index 4f42ff6..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/context_managers.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/embeds.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/embeds.cpython-311.pyc deleted file mode 100644 index b44218f..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/embeds.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/emoji.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/emoji.cpython-311.pyc deleted file mode 100644 index af755cb..0000000 Binary files 
a/.venv/Lib/site-packages/discord/__pycache__/emoji.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/enums.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/enums.cpython-311.pyc deleted file mode 100644 index f99de6c..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/enums.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/errors.cpython-311.pyc deleted file mode 100644 index 1dd2abf..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/errors.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/file.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/file.cpython-311.pyc deleted file mode 100644 index 9140a7c..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/file.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/flags.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/flags.cpython-311.pyc deleted file mode 100644 index 5a95025..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/flags.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/gateway.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/gateway.cpython-311.pyc deleted file mode 100644 index 9d1637b..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/gateway.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/guild.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/guild.cpython-311.pyc deleted file mode 100644 index dcb4419..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/guild.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/http.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/http.cpython-311.pyc deleted file mode 100644 index 9710740..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/http.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/integrations.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/integrations.cpython-311.pyc deleted file mode 100644 index 2aacd3d..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/integrations.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/interactions.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/interactions.cpython-311.pyc deleted file mode 100644 index 288ab40..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/interactions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/invite.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/invite.cpython-311.pyc deleted file mode 100644 index a74d965..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/invite.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/member.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/member.cpython-311.pyc deleted file mode 100644 index 023bfc0..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/member.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/mentions.cpython-311.pyc 
b/.venv/Lib/site-packages/discord/__pycache__/mentions.cpython-311.pyc deleted file mode 100644 index bb5f65b..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/mentions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/message.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/message.cpython-311.pyc deleted file mode 100644 index 5187d4d..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/message.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/mixins.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/mixins.cpython-311.pyc deleted file mode 100644 index 9755b7f..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/mixins.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/object.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/object.cpython-311.pyc deleted file mode 100644 index bfc1aef..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/object.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/oggparse.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/oggparse.cpython-311.pyc deleted file mode 100644 index d7deee7..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/oggparse.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/opus.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/opus.cpython-311.pyc deleted file mode 100644 index a24dce6..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/opus.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/partial_emoji.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/partial_emoji.cpython-311.pyc deleted file mode 100644 index 3b724b9..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/partial_emoji.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/permissions.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/permissions.cpython-311.pyc deleted file mode 100644 index a5d3873..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/permissions.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/player.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/player.cpython-311.pyc deleted file mode 100644 index 6111ff5..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/player.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/raw_models.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/raw_models.cpython-311.pyc deleted file mode 100644 index 6eb308d..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/raw_models.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/reaction.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/reaction.cpython-311.pyc deleted file mode 100644 index c84461d..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/reaction.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/role.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/role.cpython-311.pyc deleted file mode 100644 index f4bbe64..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/role.cpython-311.pyc 
and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/scheduled_event.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/scheduled_event.cpython-311.pyc deleted file mode 100644 index 64cc4dd..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/scheduled_event.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/shard.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/shard.cpython-311.pyc deleted file mode 100644 index 5089469..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/shard.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/stage_instance.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/stage_instance.cpython-311.pyc deleted file mode 100644 index 7997acd..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/stage_instance.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/state.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/state.cpython-311.pyc deleted file mode 100644 index bb7258c..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/state.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/sticker.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/sticker.cpython-311.pyc deleted file mode 100644 index 300f3b5..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/sticker.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/team.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/team.cpython-311.pyc deleted file mode 100644 index 8f5633f..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/team.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/template.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/template.cpython-311.pyc deleted file mode 100644 index 5257ab6..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/template.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/threads.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/threads.cpython-311.pyc deleted file mode 100644 index 4316281..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/threads.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/user.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/user.cpython-311.pyc deleted file mode 100644 index 4b5b3bc..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/user.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/utils.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/utils.cpython-311.pyc deleted file mode 100644 index d32c768..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/utils.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/voice_client.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/voice_client.cpython-311.pyc deleted file mode 100644 index 9499a3a..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/voice_client.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/welcome_screen.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/welcome_screen.cpython-311.pyc deleted 
file mode 100644 index 9ebe60e..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/welcome_screen.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/__pycache__/widget.cpython-311.pyc b/.venv/Lib/site-packages/discord/__pycache__/widget.cpython-311.pyc deleted file mode 100644 index a8c48f8..0000000 Binary files a/.venv/Lib/site-packages/discord/__pycache__/widget.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/_types.py b/.venv/Lib/site-packages/discord/_types.py deleted file mode 100644 index 3310635..0000000 --- a/.venv/Lib/site-packages/discord/_types.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -from typing import TypeVar, TYPE_CHECKING - -if TYPE_CHECKING: - from typing_extensions import TypeVar - from .client import Client - - ClientT = TypeVar('ClientT', bound=Client, covariant=True, default=Client) -else: - ClientT = TypeVar('ClientT', bound='Client', covariant=True) diff --git a/.venv/Lib/site-packages/discord/abc.py b/.venv/Lib/site-packages/discord/abc.py deleted file mode 100644 index f409188..0000000 --- a/.venv/Lib/site-packages/discord/abc.py +++ /dev/null @@ -1,1915 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations - -import copy -import time -import asyncio -from datetime import datetime -from typing import ( - Any, - AsyncIterator, - Callable, - Dict, - Iterable, - List, - Optional, - TYPE_CHECKING, - Protocol, - Sequence, - Tuple, - TypeVar, - Union, - overload, - runtime_checkable, -) - -from .object import OLDEST_OBJECT, Object -from .context_managers import Typing -from .enums import ChannelType, InviteTarget -from .errors import ClientException -from .mentions import AllowedMentions -from .permissions import PermissionOverwrite, Permissions -from .role import Role -from .invite import Invite -from .file import File -from .http import handle_message_parameters -from .voice_client import VoiceClient, VoiceProtocol -from .sticker import GuildSticker, StickerItem -from . import utils - -__all__ = ( - 'Snowflake', - 'User', - 'PrivateChannel', - 'GuildChannel', - 'Messageable', - 'Connectable', -) - -T = TypeVar('T', bound=VoiceProtocol) - -if TYPE_CHECKING: - from typing_extensions import Self - - from .client import Client - from .user import ClientUser - from .asset import Asset - from .state import ConnectionState - from .guild import Guild - from .member import Member - from .channel import CategoryChannel - from .embeds import Embed - from .message import Message, MessageReference, PartialMessage - from .channel import ( - TextChannel, - DMChannel, - GroupChannel, - PartialMessageable, - VocalGuildChannel, - VoiceChannel, - StageChannel, - ) - from .threads import Thread - from .ui.view import View - from .types.channel import ( - PermissionOverwrite as PermissionOverwritePayload, - Channel as ChannelPayload, - GuildChannel as GuildChannelPayload, - OverwriteType, - ) - from .types.snowflake import ( - SnowflakeList, - ) - - PartialMessageableChannel = Union[TextChannel, VoiceChannel, StageChannel, Thread, DMChannel, PartialMessageable] - MessageableChannel = Union[PartialMessageableChannel, GroupChannel] - SnowflakeTime = Union["Snowflake", datetime] - -MISSING = utils.MISSING - - -class _Undefined: - def __repr__(self) -> str: - return 'see-below' - - -_undefined: Any = _Undefined() - - -async def _single_delete_strategy(messages: Iterable[Message], *, reason: Optional[str] = None): - for m in messages: - await m.delete() - - -async def _purge_helper( - channel: Union[Thread, TextChannel, VocalGuildChannel], - *, - limit: Optional[int] = 100, - check: Callable[[Message], bool] = MISSING, - before: Optional[SnowflakeTime] = None, - after: Optional[SnowflakeTime] = None, - around: Optional[SnowflakeTime] = None, - oldest_first: Optional[bool] = None, - bulk: bool = True, - reason: Optional[str] = None, -) -> List[Message]: - if check is MISSING: - check = lambda m: True - - iterator = channel.history(limit=limit, before=before, after=after, oldest_first=oldest_first, around=around) - ret: List[Message] = [] - count = 0 - - minimum_time = int((time.time() - 14 * 24 * 60 * 60) * 1000.0 - 1420070400000) << 22 - strategy = channel.delete_messages if bulk else _single_delete_strategy - - async for message in iterator: - if count == 100: - to_delete = ret[-100:] - await strategy(to_delete, reason=reason) - count = 0 - await asyncio.sleep(1) - - if not check(message): - continue - - if message.id < minimum_time: - # older than 14 days old - if count == 1: - await ret[-1].delete() - elif count >= 2: - to_delete = ret[-count:] - await strategy(to_delete, reason=reason) - - count = 0 - strategy = _single_delete_strategy - - count += 1 - ret.append(message) - - 
# Some messages remaining to poll - if count >= 2: - # more than 2 messages -> bulk delete - to_delete = ret[-count:] - await strategy(to_delete, reason=reason) - elif count == 1: - # delete a single message - await ret[-1].delete() - - return ret - - -@runtime_checkable -class Snowflake(Protocol): - """An ABC that details the common operations on a Discord model. - - Almost all :ref:`Discord models ` meet this - abstract base class. - - If you want to create a snowflake on your own, consider using - :class:`.Object`. - - Attributes - ----------- - id: :class:`int` - The model's unique ID. - """ - - id: int - - -@runtime_checkable -class User(Snowflake, Protocol): - """An ABC that details the common operations on a Discord user. - - The following implement this ABC: - - - :class:`~discord.User` - - :class:`~discord.ClientUser` - - :class:`~discord.Member` - - This ABC must also implement :class:`~discord.abc.Snowflake`. - - Attributes - ----------- - name: :class:`str` - The user's username. - discriminator: :class:`str` - The user's discriminator. This is a legacy concept that is no longer used. - global_name: Optional[:class:`str`] - The user's global nickname. - bot: :class:`bool` - If the user is a bot account. - system: :class:`bool` - If the user is a system account. - """ - - name: str - discriminator: str - global_name: Optional[str] - bot: bool - system: bool - - @property - def display_name(self) -> str: - """:class:`str`: Returns the user's display name.""" - raise NotImplementedError - - @property - def mention(self) -> str: - """:class:`str`: Returns a string that allows you to mention the given user.""" - raise NotImplementedError - - @property - def avatar(self) -> Optional[Asset]: - """Optional[:class:`~discord.Asset`]: Returns an Asset that represents the user's avatar, if present.""" - raise NotImplementedError - - @property - def default_avatar(self) -> Asset: - """:class:`~discord.Asset`: Returns the default avatar for a given user.""" - raise NotImplementedError - - @property - def display_avatar(self) -> Asset: - """:class:`~discord.Asset`: Returns the user's display avatar. - - For regular users this is just their default avatar or uploaded avatar. - - .. versionadded:: 2.0 - """ - raise NotImplementedError - - def mentioned_in(self, message: Message) -> bool: - """Checks if the user is mentioned in the specified message. - - Parameters - ----------- - message: :class:`~discord.Message` - The message to check if you're mentioned in. - - Returns - ------- - :class:`bool` - Indicates if the user is mentioned in the message. - """ - raise NotImplementedError - - -class PrivateChannel: - """An ABC that details the common operations on a private Discord channel. - - The following implement this ABC: - - - :class:`~discord.DMChannel` - - :class:`~discord.GroupChannel` - - This ABC must also implement :class:`~discord.abc.Snowflake`. - - Attributes - ----------- - me: :class:`~discord.ClientUser` - The user presenting yourself. 
- """ - - __slots__ = () - - id: int - me: ClientUser - - -class _Overwrites: - __slots__ = ('id', 'allow', 'deny', 'type') - - ROLE = 0 - MEMBER = 1 - - def __init__(self, data: PermissionOverwritePayload) -> None: - self.id: int = int(data['id']) - self.allow: int = int(data.get('allow', 0)) - self.deny: int = int(data.get('deny', 0)) - self.type: OverwriteType = data['type'] - - def _asdict(self) -> PermissionOverwritePayload: - return { - 'id': self.id, - 'allow': str(self.allow), - 'deny': str(self.deny), - 'type': self.type, - } - - def is_role(self) -> bool: - return self.type == 0 - - def is_member(self) -> bool: - return self.type == 1 - - -class GuildChannel: - """An ABC that details the common operations on a Discord guild channel. - - The following implement this ABC: - - - :class:`~discord.TextChannel` - - :class:`~discord.VoiceChannel` - - :class:`~discord.CategoryChannel` - - :class:`~discord.StageChannel` - - :class:`~discord.ForumChannel` - - This ABC must also implement :class:`~discord.abc.Snowflake`. - - Attributes - ----------- - name: :class:`str` - The channel name. - guild: :class:`~discord.Guild` - The guild the channel belongs to. - position: :class:`int` - The position in the channel list. This is a number that starts at 0. - e.g. the top channel is position 0. - """ - - __slots__ = () - - id: int - name: str - guild: Guild - type: ChannelType - position: int - category_id: Optional[int] - _state: ConnectionState - _overwrites: List[_Overwrites] - - if TYPE_CHECKING: - - def __init__(self, *, state: ConnectionState, guild: Guild, data: GuildChannelPayload): - ... - - def __str__(self) -> str: - return self.name - - @property - def _sorting_bucket(self) -> int: - raise NotImplementedError - - def _update(self, guild: Guild, data: Dict[str, Any]) -> None: - raise NotImplementedError - - async def _move( - self, - position: int, - parent_id: Optional[Any] = None, - lock_permissions: bool = False, - *, - reason: Optional[str], - ) -> None: - if position < 0: - raise ValueError('Channel position cannot be less than 0.') - - http = self._state.http - bucket = self._sorting_bucket - channels: List[GuildChannel] = [c for c in self.guild.channels if c._sorting_bucket == bucket] - - channels.sort(key=lambda c: c.position) - - try: - # remove ourselves from the channel list - channels.remove(self) - except ValueError: - # not there somehow lol - return - else: - index = next((i for i, c in enumerate(channels) if c.position >= position), len(channels)) - # add ourselves at our designated position - channels.insert(index, self) - - payload = [] - for index, c in enumerate(channels): - d: Dict[str, Any] = {'id': c.id, 'position': index} - if parent_id is not _undefined and c.id == self.id: - d.update(parent_id=parent_id, lock_permissions=lock_permissions) - payload.append(d) - - await http.bulk_channel_update(self.guild.id, payload, reason=reason) - - async def _edit(self, options: Dict[str, Any], reason: Optional[str]) -> Optional[ChannelPayload]: - try: - parent = options.pop('category') - except KeyError: - parent_id = _undefined - else: - parent_id = parent and parent.id - - try: - options['rate_limit_per_user'] = options.pop('slowmode_delay') - except KeyError: - pass - - try: - options['default_thread_rate_limit_per_user'] = options.pop('default_thread_slowmode_delay') - except KeyError: - pass - - try: - rtc_region = options.pop('rtc_region') - except KeyError: - pass - else: - options['rtc_region'] = None if rtc_region is None else str(rtc_region) - - try: - 
video_quality_mode = options.pop('video_quality_mode') - except KeyError: - pass - else: - options['video_quality_mode'] = int(video_quality_mode) - - lock_permissions = options.pop('sync_permissions', False) - - try: - position = options.pop('position') - except KeyError: - if parent_id is not _undefined: - if lock_permissions: - category = self.guild.get_channel(parent_id) - if category: - options['permission_overwrites'] = [c._asdict() for c in category._overwrites] - options['parent_id'] = parent_id - elif lock_permissions and self.category_id is not None: - # if we're syncing permissions on a pre-existing channel category without changing it - # we need to update the permissions to point to the pre-existing category - category = self.guild.get_channel(self.category_id) - if category: - options['permission_overwrites'] = [c._asdict() for c in category._overwrites] - else: - await self._move(position, parent_id=parent_id, lock_permissions=lock_permissions, reason=reason) - - overwrites = options.get('overwrites', None) - if overwrites is not None: - perms = [] - for target, perm in overwrites.items(): - if not isinstance(perm, PermissionOverwrite): - raise TypeError(f'Expected PermissionOverwrite received {perm.__class__.__name__}') - - allow, deny = perm.pair() - payload = { - 'allow': allow.value, - 'deny': deny.value, - 'id': target.id, - } - - if isinstance(target, Role): - payload['type'] = _Overwrites.ROLE - elif isinstance(target, Object): - payload['type'] = _Overwrites.ROLE if target.type is Role else _Overwrites.MEMBER - else: - payload['type'] = _Overwrites.MEMBER - - perms.append(payload) - options['permission_overwrites'] = perms - - try: - ch_type = options['type'] - except KeyError: - pass - else: - if not isinstance(ch_type, ChannelType): - raise TypeError('type field must be of type ChannelType') - options['type'] = ch_type.value - - if options: - return await self._state.http.edit_channel(self.id, reason=reason, **options) - - def _fill_overwrites(self, data: GuildChannelPayload) -> None: - self._overwrites = [] - everyone_index = 0 - everyone_id = self.guild.id - - for index, overridden in enumerate(data.get('permission_overwrites', [])): - overwrite = _Overwrites(overridden) - self._overwrites.append(overwrite) - - if overwrite.type == _Overwrites.MEMBER: - continue - - if overwrite.id == everyone_id: - # the @everyone role is not guaranteed to be the first one - # in the list of permission overwrites, however the permission - # resolution code kind of requires that it is the first one in - # the list since it is special. So we need the index so we can - # swap it to be the first one. 
- everyone_index = index - - # do the swap - tmp = self._overwrites - if tmp: - tmp[everyone_index], tmp[0] = tmp[0], tmp[everyone_index] - - @property - def changed_roles(self) -> List[Role]: - """List[:class:`~discord.Role`]: Returns a list of roles that have been overridden from - their default values in the :attr:`~discord.Guild.roles` attribute.""" - ret = [] - g = self.guild - for overwrite in filter(lambda o: o.is_role(), self._overwrites): - role = g.get_role(overwrite.id) - if role is None: - continue - - role = copy.copy(role) - role.permissions.handle_overwrite(overwrite.allow, overwrite.deny) - ret.append(role) - return ret - - @property - def mention(self) -> str: - """:class:`str`: The string that allows you to mention the channel.""" - return f'<#{self.id}>' - - @property - def jump_url(self) -> str: - """:class:`str`: Returns a URL that allows the client to jump to the channel. - - .. versionadded:: 2.0 - """ - return f'https://discord.com/channels/{self.guild.id}/{self.id}' - - @property - def created_at(self) -> datetime: - """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" - return utils.snowflake_time(self.id) - - def overwrites_for(self, obj: Union[Role, User, Object]) -> PermissionOverwrite: - """Returns the channel-specific overwrites for a member or a role. - - Parameters - ----------- - obj: Union[:class:`~discord.Role`, :class:`~discord.abc.User`, :class:`~discord.Object`] - The role or user denoting whose overwrite to get. - - Returns - --------- - :class:`~discord.PermissionOverwrite` - The permission overwrites for this object. - """ - - if isinstance(obj, User): - predicate = lambda p: p.is_member() - elif isinstance(obj, Role): - predicate = lambda p: p.is_role() - else: - predicate = lambda p: True - - for overwrite in filter(predicate, self._overwrites): - if overwrite.id == obj.id: - allow = Permissions(overwrite.allow) - deny = Permissions(overwrite.deny) - return PermissionOverwrite.from_pair(allow, deny) - - return PermissionOverwrite() - - @property - def overwrites(self) -> Dict[Union[Role, Member, Object], PermissionOverwrite]: - """Returns all of the channel's overwrites. - - This is returned as a dictionary where the key contains the target which - can be either a :class:`~discord.Role` or a :class:`~discord.Member` and the value is the - overwrite as a :class:`~discord.PermissionOverwrite`. - - .. versionchanged:: 2.0 - Overwrites can now be type-aware :class:`~discord.Object` in case of cache lookup failure - - Returns - -------- - Dict[Union[:class:`~discord.Role`, :class:`~discord.Member`, :class:`~discord.Object`], :class:`~discord.PermissionOverwrite`] - The channel's permission overwrites. - """ - ret = {} - for ow in self._overwrites: - allow = Permissions(ow.allow) - deny = Permissions(ow.deny) - overwrite = PermissionOverwrite.from_pair(allow, deny) - target = None - - if ow.is_role(): - target = self.guild.get_role(ow.id) - elif ow.is_member(): - target = self.guild.get_member(ow.id) - - if target is None: - target_type = Role if ow.is_role() else User - target = Object(id=ow.id, type=target_type) # type: ignore - - ret[target] = overwrite - return ret - - @property - def category(self) -> Optional[CategoryChannel]: - """Optional[:class:`~discord.CategoryChannel`]: The category this channel belongs to. - - If there is no category then this is ``None``. 
- """ - return self.guild.get_channel(self.category_id) # type: ignore # These are coerced into CategoryChannel - - @property - def permissions_synced(self) -> bool: - """:class:`bool`: Whether or not the permissions for this channel are synced with the - category it belongs to. - - If there is no category then this is ``False``. - - .. versionadded:: 1.3 - """ - if self.category_id is None: - return False - - category = self.guild.get_channel(self.category_id) - return bool(category and category.overwrites == self.overwrites) - - def _apply_implicit_permissions(self, base: Permissions) -> None: - # if you can't send a message in a channel then you can't have certain - # permissions as well - if not base.send_messages: - base.send_tts_messages = False - base.mention_everyone = False - base.embed_links = False - base.attach_files = False - - # if you can't read a channel then you have no permissions there - if not base.read_messages: - denied = Permissions.all_channel() - base.value &= ~denied.value - - def permissions_for(self, obj: Union[Member, Role], /) -> Permissions: - """Handles permission resolution for the :class:`~discord.Member` - or :class:`~discord.Role`. - - This function takes into consideration the following cases: - - - Guild owner - - Guild roles - - Channel overrides - - Member overrides - - Implicit permissions - - Member timeout - - If a :class:`~discord.Role` is passed, then it checks the permissions - someone with that role would have, which is essentially: - - - The default role permissions - - The permissions of the role used as a parameter - - The default role permission overwrites - - The permission overwrites of the role used as a parameter - - .. versionchanged:: 2.0 - The object passed in can now be a role object. - - .. versionchanged:: 2.0 - ``obj`` parameter is now positional-only. - - Parameters - ---------- - obj: Union[:class:`~discord.Member`, :class:`~discord.Role`] - The object to resolve permissions for. This could be either - a member or a role. If it's a role then member overwrites - are not computed. - - Returns - ------- - :class:`~discord.Permissions` - The resolved permissions for the member or role. - """ - - # The current cases can be explained as: - # Guild owner get all permissions -- no questions asked. Otherwise... - # The @everyone role gets the first application. - # After that, the applied roles that the user has in the channel - # (or otherwise) are then OR'd together. - # After the role permissions are resolved, the member permissions - # have to take into effect. - # After all that is done.. you have to do the following: - - # If manage permissions is True, then all permissions are set to True. - - # The operation first takes into consideration the denied - # and then the allowed. 
- - if self.guild.owner_id == obj.id: - return Permissions.all() - - default = self.guild.default_role - base = Permissions(default.permissions.value) - - # Handle the role case first - if isinstance(obj, Role): - base.value |= obj._permissions - - if base.administrator: - return Permissions.all() - - # Apply @everyone allow/deny first since it's special - try: - maybe_everyone = self._overwrites[0] - if maybe_everyone.id == self.guild.id: - base.handle_overwrite(allow=maybe_everyone.allow, deny=maybe_everyone.deny) - except IndexError: - pass - - if obj.is_default(): - return base - - overwrite = utils.get(self._overwrites, type=_Overwrites.ROLE, id=obj.id) - if overwrite is not None: - base.handle_overwrite(overwrite.allow, overwrite.deny) - - return base - - roles = obj._roles - get_role = self.guild.get_role - - # Apply guild roles that the member has. - for role_id in roles: - role = get_role(role_id) - if role is not None: - base.value |= role._permissions - - # Guild-wide Administrator -> True for everything - # Bypass all channel-specific overrides - if base.administrator: - return Permissions.all() - - # Apply @everyone allow/deny first since it's special - try: - maybe_everyone = self._overwrites[0] - if maybe_everyone.id == self.guild.id: - base.handle_overwrite(allow=maybe_everyone.allow, deny=maybe_everyone.deny) - remaining_overwrites = self._overwrites[1:] - else: - remaining_overwrites = self._overwrites - except IndexError: - remaining_overwrites = self._overwrites - - denies = 0 - allows = 0 - - # Apply channel specific role permission overwrites - for overwrite in remaining_overwrites: - if overwrite.is_role() and roles.has(overwrite.id): - denies |= overwrite.deny - allows |= overwrite.allow - - base.handle_overwrite(allow=allows, deny=denies) - - # Apply member specific permission overwrites - for overwrite in remaining_overwrites: - if overwrite.is_member() and overwrite.id == obj.id: - base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny) - break - - if obj.is_timed_out(): - # Timeout leads to every permission except VIEW_CHANNEL and READ_MESSAGE_HISTORY - # being explicitly denied - # N.B.: This *must* come last, because it's a conclusive mask - base.value &= Permissions._timeout_mask() - - return base - - async def delete(self, *, reason: Optional[str] = None) -> None: - """|coro| - - Deletes the channel. - - You must have :attr:`~discord.Permissions.manage_channels` to do this. - - Parameters - ----------- - reason: Optional[:class:`str`] - The reason for deleting this channel. - Shows up on the audit log. - - Raises - ------- - ~discord.Forbidden - You do not have proper permissions to delete the channel. - ~discord.NotFound - The channel was not found or was already deleted. - ~discord.HTTPException - Deleting the channel failed. - """ - await self._state.http.delete_channel(self.id, reason=reason) - - @overload - async def set_permissions( - self, - target: Union[Member, Role], - *, - overwrite: Optional[Union[PermissionOverwrite, _Undefined]] = ..., - reason: Optional[str] = ..., - ) -> None: - ... - - @overload - async def set_permissions( - self, - target: Union[Member, Role], - *, - reason: Optional[str] = ..., - **permissions: Optional[bool], - ) -> None: - ... - - async def set_permissions( - self, - target: Union[Member, Role], - *, - overwrite: Any = _undefined, - reason: Optional[str] = None, - **permissions: Optional[bool], - ) -> None: - r"""|coro| - - Sets the channel specific permission overwrites for a target in the - channel. 
- - The ``target`` parameter should either be a :class:`~discord.Member` or a - :class:`~discord.Role` that belongs to guild. - - The ``overwrite`` parameter, if given, must either be ``None`` or - :class:`~discord.PermissionOverwrite`. For convenience, you can pass in - keyword arguments denoting :class:`~discord.Permissions` attributes. If this is - done, then you cannot mix the keyword arguments with the ``overwrite`` - parameter. - - If the ``overwrite`` parameter is ``None``, then the permission - overwrites are deleted. - - You must have :attr:`~discord.Permissions.manage_roles` to do this. - - .. note:: - - This method *replaces* the old overwrites with the ones given. - - Examples - ---------- - - Setting allow and deny: :: - - await message.channel.set_permissions(message.author, read_messages=True, - send_messages=False) - - Deleting overwrites :: - - await channel.set_permissions(member, overwrite=None) - - Using :class:`~discord.PermissionOverwrite` :: - - overwrite = discord.PermissionOverwrite() - overwrite.send_messages = False - overwrite.read_messages = True - await channel.set_permissions(member, overwrite=overwrite) - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` instead of - ``InvalidArgument``. - - - Parameters - ----------- - target: Union[:class:`~discord.Member`, :class:`~discord.Role`] - The member or role to overwrite permissions for. - overwrite: Optional[:class:`~discord.PermissionOverwrite`] - The permissions to allow and deny to the target, or ``None`` to - delete the overwrite. - \*\*permissions - A keyword argument list of permissions to set for ease of use. - Cannot be mixed with ``overwrite``. - reason: Optional[:class:`str`] - The reason for doing this action. Shows up on the audit log. - - Raises - ------- - ~discord.Forbidden - You do not have permissions to edit channel specific permissions. - ~discord.HTTPException - Editing channel specific permissions failed. - ~discord.NotFound - The role or member being edited is not part of the guild. - TypeError - The ``overwrite`` parameter was invalid or the target type was not - :class:`~discord.Role` or :class:`~discord.Member`. - ValueError - The ``overwrite`` parameter and ``positions`` parameters were both - unset. 
- """ - - http = self._state.http - - if isinstance(target, User): - perm_type = _Overwrites.MEMBER - elif isinstance(target, Role): - perm_type = _Overwrites.ROLE - else: - raise ValueError('target parameter must be either Member or Role') - - if overwrite is _undefined: - if len(permissions) == 0: - raise ValueError('No overwrite provided.') - try: - overwrite = PermissionOverwrite(**permissions) - except (ValueError, TypeError): - raise TypeError('Invalid permissions given to keyword arguments.') - else: - if len(permissions) > 0: - raise TypeError('Cannot mix overwrite and keyword arguments.') - - if overwrite is None: - await http.delete_channel_permissions(self.id, target.id, reason=reason) - elif isinstance(overwrite, PermissionOverwrite): - (allow, deny) = overwrite.pair() - await http.edit_channel_permissions( - self.id, target.id, str(allow.value), str(deny.value), perm_type, reason=reason - ) - else: - raise TypeError('Invalid overwrite type provided.') - - async def _clone_impl( - self, - base_attrs: Dict[str, Any], - *, - name: Optional[str] = None, - reason: Optional[str] = None, - ) -> Self: - base_attrs['permission_overwrites'] = [x._asdict() for x in self._overwrites] - base_attrs['parent_id'] = self.category_id - base_attrs['name'] = name or self.name - guild_id = self.guild.id - cls = self.__class__ - data = await self._state.http.create_channel(guild_id, self.type.value, reason=reason, **base_attrs) - obj = cls(state=self._state, guild=self.guild, data=data) - - # temporarily add it to the cache - self.guild._channels[obj.id] = obj # type: ignore # obj is a GuildChannel - return obj - - async def clone(self, *, name: Optional[str] = None, reason: Optional[str] = None) -> Self: - """|coro| - - Clones this channel. This creates a channel with the same properties - as this channel. - - You must have :attr:`~discord.Permissions.manage_channels` to do this. - - .. versionadded:: 1.1 - - Parameters - ------------ - name: Optional[:class:`str`] - The name of the new channel. If not provided, defaults to this - channel name. - reason: Optional[:class:`str`] - The reason for cloning this channel. Shows up on the audit log. - - Raises - ------- - ~discord.Forbidden - You do not have the proper permissions to create this channel. - ~discord.HTTPException - Creating the channel failed. - - Returns - -------- - :class:`.abc.GuildChannel` - The channel that was created. - """ - raise NotImplementedError - - @overload - async def move( - self, - *, - beginning: bool, - offset: int = MISSING, - category: Optional[Snowflake] = MISSING, - sync_permissions: bool = MISSING, - reason: Optional[str] = MISSING, - ) -> None: - ... - - @overload - async def move( - self, - *, - end: bool, - offset: int = MISSING, - category: Optional[Snowflake] = MISSING, - sync_permissions: bool = MISSING, - reason: str = MISSING, - ) -> None: - ... - - @overload - async def move( - self, - *, - before: Snowflake, - offset: int = MISSING, - category: Optional[Snowflake] = MISSING, - sync_permissions: bool = MISSING, - reason: str = MISSING, - ) -> None: - ... - - @overload - async def move( - self, - *, - after: Snowflake, - offset: int = MISSING, - category: Optional[Snowflake] = MISSING, - sync_permissions: bool = MISSING, - reason: str = MISSING, - ) -> None: - ... - - async def move(self, **kwargs: Any) -> None: - """|coro| - - A rich interface to help move a channel relative to other channels. - - If exact position movement is required, ``edit`` should be used instead. 
- - You must have :attr:`~discord.Permissions.manage_channels` to do this. - - .. note:: - - Voice channels will always be sorted below text channels. - This is a Discord limitation. - - .. versionadded:: 1.7 - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` or - :exc:`ValueError` instead of ``InvalidArgument``. - - Parameters - ------------ - beginning: :class:`bool` - Whether to move the channel to the beginning of the - channel list (or category if given). - This is mutually exclusive with ``end``, ``before``, and ``after``. - end: :class:`bool` - Whether to move the channel to the end of the - channel list (or category if given). - This is mutually exclusive with ``beginning``, ``before``, and ``after``. - before: :class:`~discord.abc.Snowflake` - The channel that should be before our current channel. - This is mutually exclusive with ``beginning``, ``end``, and ``after``. - after: :class:`~discord.abc.Snowflake` - The channel that should be after our current channel. - This is mutually exclusive with ``beginning``, ``end``, and ``before``. - offset: :class:`int` - The number of channels to offset the move by. For example, - an offset of ``2`` with ``beginning=True`` would move - it 2 after the beginning. A positive number moves it below - while a negative number moves it above. Note that this - number is relative and computed after the ``beginning``, - ``end``, ``before``, and ``after`` parameters. - category: Optional[:class:`~discord.abc.Snowflake`] - The category to move this channel under. - If ``None`` is given then it moves it out of the category. - This parameter is ignored if moving a category channel. - sync_permissions: :class:`bool` - Whether to sync the permissions with the category (if given). - reason: :class:`str` - The reason for the move. - - Raises - ------- - ValueError - An invalid position was given. - TypeError - A bad mix of arguments were passed. - Forbidden - You do not have permissions to move the channel. - HTTPException - Moving the channel failed. 
- """ - - if not kwargs: - return - - beginning, end = kwargs.get('beginning'), kwargs.get('end') - before, after = kwargs.get('before'), kwargs.get('after') - offset = kwargs.get('offset', 0) - if sum(bool(a) for a in (beginning, end, before, after)) > 1: - raise TypeError('Only one of [before, after, end, beginning] can be used.') - - bucket = self._sorting_bucket - parent_id = kwargs.get('category', MISSING) - # fmt: off - channels: List[GuildChannel] - if parent_id not in (MISSING, None): - parent_id = parent_id.id - channels = [ - ch - for ch in self.guild.channels - if ch._sorting_bucket == bucket - and ch.category_id == parent_id - ] - else: - channels = [ - ch - for ch in self.guild.channels - if ch._sorting_bucket == bucket - and ch.category_id == self.category_id - ] - # fmt: on - - channels.sort(key=lambda c: (c.position, c.id)) - - try: - # Try to remove ourselves from the channel list - channels.remove(self) - except ValueError: - # If we're not there then it's probably due to not being in the category - pass - - index = None - if beginning: - index = 0 - elif end: - index = len(channels) - elif before: - index = next((i for i, c in enumerate(channels) if c.id == before.id), None) - elif after: - index = next((i + 1 for i, c in enumerate(channels) if c.id == after.id), None) - - if index is None: - raise ValueError('Could not resolve appropriate move position') - - channels.insert(max((index + offset), 0), self) - payload = [] - lock_permissions = kwargs.get('sync_permissions', False) - reason = kwargs.get('reason') - for index, channel in enumerate(channels): - d = {'id': channel.id, 'position': index} - if parent_id is not MISSING and channel.id == self.id: - d.update(parent_id=parent_id, lock_permissions=lock_permissions) - payload.append(d) - - await self._state.http.bulk_channel_update(self.guild.id, payload, reason=reason) - - async def create_invite( - self, - *, - reason: Optional[str] = None, - max_age: int = 0, - max_uses: int = 0, - temporary: bool = False, - unique: bool = True, - target_type: Optional[InviteTarget] = None, - target_user: Optional[User] = None, - target_application_id: Optional[int] = None, - ) -> Invite: - """|coro| - - Creates an instant invite from a text or voice channel. - - You must have :attr:`~discord.Permissions.create_instant_invite` to do this. - - Parameters - ------------ - max_age: :class:`int` - How long the invite should last in seconds. If it's 0 then the invite - doesn't expire. Defaults to ``0``. - max_uses: :class:`int` - How many uses the invite could be used for. If it's 0 then there - are unlimited uses. Defaults to ``0``. - temporary: :class:`bool` - Denotes that the invite grants temporary membership - (i.e. they get kicked after they disconnect). Defaults to ``False``. - unique: :class:`bool` - Indicates if a unique invite URL should be created. Defaults to True. - If this is set to ``False`` then it will return a previously created - invite. - reason: Optional[:class:`str`] - The reason for creating this invite. Shows up on the audit log. - target_type: Optional[:class:`.InviteTarget`] - The type of target for the voice channel invite, if any. - - .. versionadded:: 2.0 - - target_user: Optional[:class:`User`] - The user whose stream to display for this invite, required if ``target_type`` is :attr:`.InviteTarget.stream`. The user must be streaming in the channel. - - .. 
versionadded:: 2.0 - - target_application_id:: Optional[:class:`int`] - The id of the embedded application for the invite, required if ``target_type`` is :attr:`.InviteTarget.embedded_application`. - - .. versionadded:: 2.0 - - Raises - ------- - ~discord.HTTPException - Invite creation failed. - - ~discord.NotFound - The channel that was passed is a category or an invalid channel. - - Returns - -------- - :class:`~discord.Invite` - The invite that was created. - """ - - data = await self._state.http.create_invite( - self.id, - reason=reason, - max_age=max_age, - max_uses=max_uses, - temporary=temporary, - unique=unique, - target_type=target_type.value if target_type else None, - target_user_id=target_user.id if target_user else None, - target_application_id=target_application_id, - ) - return Invite.from_incomplete(data=data, state=self._state) - - async def invites(self) -> List[Invite]: - """|coro| - - Returns a list of all active instant invites from this channel. - - You must have :attr:`~discord.Permissions.manage_channels` to get this information. - - Raises - ------- - ~discord.Forbidden - You do not have proper permissions to get the information. - ~discord.HTTPException - An error occurred while fetching the information. - - Returns - ------- - List[:class:`~discord.Invite`] - The list of invites that are currently active. - """ - - state = self._state - data = await state.http.invites_from_channel(self.id) - guild = self.guild - return [Invite(state=state, data=invite, channel=self, guild=guild) for invite in data] - - -class Messageable: - """An ABC that details the common operations on a model that can send messages. - - The following classes implement this ABC: - - - :class:`~discord.TextChannel` - - :class:`~discord.VoiceChannel` - - :class:`~discord.StageChannel` - - :class:`~discord.DMChannel` - - :class:`~discord.GroupChannel` - - :class:`~discord.PartialMessageable` - - :class:`~discord.User` - - :class:`~discord.Member` - - :class:`~discord.ext.commands.Context` - - :class:`~discord.Thread` - """ - - __slots__ = () - _state: ConnectionState - - async def _get_channel(self) -> MessageableChannel: - raise NotImplementedError - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embed: Embed = ..., - file: File = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embed: Embed = ..., - files: Sequence[File] = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - silent: bool = ..., - ) -> Message: - ... 
- - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embeds: Sequence[Embed] = ..., - file: File = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embeds: Sequence[Embed] = ..., - files: Sequence[File] = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - async def send( - self, - content: Optional[str] = None, - *, - tts: bool = False, - embed: Optional[Embed] = None, - embeds: Optional[Sequence[Embed]] = None, - file: Optional[File] = None, - files: Optional[Sequence[File]] = None, - stickers: Optional[Sequence[Union[GuildSticker, StickerItem]]] = None, - delete_after: Optional[float] = None, - nonce: Optional[Union[str, int]] = None, - allowed_mentions: Optional[AllowedMentions] = None, - reference: Optional[Union[Message, MessageReference, PartialMessage]] = None, - mention_author: Optional[bool] = None, - view: Optional[View] = None, - suppress_embeds: bool = False, - silent: bool = False, - ) -> Message: - """|coro| - - Sends a message to the destination with the content given. - - The content must be a type that can convert to a string through ``str(content)``. - If the content is set to ``None`` (the default), then the ``embed`` parameter must - be provided. - - To upload a single file, the ``file`` parameter should be used with a - single :class:`~discord.File` object. To upload multiple files, the ``files`` - parameter should be used with a :class:`list` of :class:`~discord.File` objects. - **Specifying both parameters will lead to an exception**. - - To upload a single embed, the ``embed`` parameter should be used with a - single :class:`~discord.Embed` object. To upload multiple embeds, the ``embeds`` - parameter should be used with a :class:`list` of :class:`~discord.Embed` objects. - **Specifying both parameters will lead to an exception**. - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` or - :exc:`ValueError` instead of ``InvalidArgument``. - - Parameters - ------------ - content: Optional[:class:`str`] - The content of the message to send. - tts: :class:`bool` - Indicates if the message should be sent using text-to-speech. - embed: :class:`~discord.Embed` - The rich embed for the content. - embeds: List[:class:`~discord.Embed`] - A list of embeds to upload. Must be a maximum of 10. - - .. versionadded:: 2.0 - file: :class:`~discord.File` - The file to upload. - files: List[:class:`~discord.File`] - A list of files to upload. Must be a maximum of 10. - nonce: :class:`int` - The nonce to use for sending this message. If the message was successfully sent, - then the message will have a nonce with this value. - delete_after: :class:`float` - If provided, the number of seconds to wait in the background - before deleting the message we just sent. 
If the deletion fails, - then it is silently ignored. - allowed_mentions: :class:`~discord.AllowedMentions` - Controls the mentions being processed in this message. If this is - passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`. - The merging behaviour only overrides attributes that have been explicitly passed - to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`. - If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions` - are used instead. - - .. versionadded:: 1.4 - - reference: Union[:class:`~discord.Message`, :class:`~discord.MessageReference`, :class:`~discord.PartialMessage`] - A reference to the :class:`~discord.Message` to which you are replying, this can be created using - :meth:`~discord.Message.to_reference` or passed directly as a :class:`~discord.Message`. You can control - whether this mentions the author of the referenced message using the :attr:`~discord.AllowedMentions.replied_user` - attribute of ``allowed_mentions`` or by setting ``mention_author``. - - .. versionadded:: 1.6 - - mention_author: Optional[:class:`bool`] - If set, overrides the :attr:`~discord.AllowedMentions.replied_user` attribute of ``allowed_mentions``. - - .. versionadded:: 1.6 - view: :class:`discord.ui.View` - A Discord UI View to add to the message. - - .. versionadded:: 2.0 - stickers: Sequence[Union[:class:`~discord.GuildSticker`, :class:`~discord.StickerItem`]] - A list of stickers to upload. Must be a maximum of 3. - - .. versionadded:: 2.0 - suppress_embeds: :class:`bool` - Whether to suppress embeds for the message. This sends the message without any embeds if set to ``True``. - - .. versionadded:: 2.0 - silent: :class:`bool` - Whether to suppress push and desktop notifications for the message. This will increment the mention counter - in the UI, but will not actually send a notification. - - .. versionadded:: 2.2 - - Raises - -------- - ~discord.HTTPException - Sending the message failed. - ~discord.Forbidden - You do not have the proper permissions to send the message. - ValueError - The ``files`` or ``embeds`` list is not of the appropriate size. - TypeError - You specified both ``file`` and ``files``, - or you specified both ``embed`` and ``embeds``, - or the ``reference`` object is not a :class:`~discord.Message`, - :class:`~discord.MessageReference` or :class:`~discord.PartialMessage`. - - Returns - --------- - :class:`~discord.Message` - The message that was sent. 
- """ - - channel = await self._get_channel() - state = self._state - content = str(content) if content is not None else None - previous_allowed_mention = state.allowed_mentions - - if stickers is not None: - sticker_ids: SnowflakeList = [sticker.id for sticker in stickers] - else: - sticker_ids = MISSING - - if reference is not None: - try: - reference_dict = reference.to_message_reference_dict() - except AttributeError: - raise TypeError('reference parameter must be Message, MessageReference, or PartialMessage') from None - else: - reference_dict = MISSING - - if view and not hasattr(view, '__discord_ui_view__'): - raise TypeError(f'view parameter must be View not {view.__class__.__name__}') - - if suppress_embeds or silent: - from .message import MessageFlags # circular import - - flags = MessageFlags._from_value(0) - flags.suppress_embeds = suppress_embeds - flags.suppress_notifications = silent - else: - flags = MISSING - - with handle_message_parameters( - content=content, - tts=tts, - file=file if file is not None else MISSING, - files=files if files is not None else MISSING, - embed=embed if embed is not None else MISSING, - embeds=embeds if embeds is not None else MISSING, - nonce=nonce, - allowed_mentions=allowed_mentions, - message_reference=reference_dict, - previous_allowed_mentions=previous_allowed_mention, - mention_author=mention_author, - stickers=sticker_ids, - view=view, - flags=flags, - ) as params: - data = await state.http.send_message(channel.id, params=params) - - ret = state.create_message(channel=channel, data=data) - if view and not view.is_finished(): - state.store_view(view, ret.id) - - if delete_after is not None: - await ret.delete(delay=delete_after) - return ret - - def typing(self) -> Typing: - """Returns an asynchronous context manager that allows you to send a typing indicator to - the destination for an indefinite period of time, or 10 seconds if the context manager - is called using ``await``. - - Example Usage: :: - - async with channel.typing(): - # simulate something heavy - await asyncio.sleep(20) - - await channel.send('Done!') - - Example Usage: :: - - await channel.typing() - # Do some computational magic for about 10 seconds - await channel.send('Done!') - - .. versionchanged:: 2.0 - This no longer works with the ``with`` syntax, ``async with`` must be used instead. - - .. versionchanged:: 2.0 - Added functionality to ``await`` the context manager to send a typing indicator for 10 seconds. - """ - return Typing(self) - - async def fetch_message(self, id: int, /) -> Message: - """|coro| - - Retrieves a single :class:`~discord.Message` from the destination. - - Parameters - ------------ - id: :class:`int` - The message ID to look for. - - Raises - -------- - ~discord.NotFound - The specified message was not found. - ~discord.Forbidden - You do not have the permissions required to get a message. - ~discord.HTTPException - Retrieving the message failed. - - Returns - -------- - :class:`~discord.Message` - The message asked for. - """ - - channel = await self._get_channel() - data = await self._state.http.get_message(channel.id, id) - return self._state.create_message(channel=channel, data=data) - - async def pins(self) -> List[Message]: - """|coro| - - Retrieves all messages that are currently pinned in the channel. - - .. note:: - - Due to a limitation with the Discord API, the :class:`.Message` - objects returned by this method do not contain complete - :attr:`.Message.reactions` data. 
- - Raises - ------- - ~discord.Forbidden - You do not have the permission to retrieve pinned messages. - ~discord.HTTPException - Retrieving the pinned messages failed. - - Returns - -------- - List[:class:`~discord.Message`] - The messages that are currently pinned. - """ - - channel = await self._get_channel() - state = self._state - data = await state.http.pins_from(channel.id) - return [state.create_message(channel=channel, data=m) for m in data] - - async def history( - self, - *, - limit: Optional[int] = 100, - before: Optional[SnowflakeTime] = None, - after: Optional[SnowflakeTime] = None, - around: Optional[SnowflakeTime] = None, - oldest_first: Optional[bool] = None, - ) -> AsyncIterator[Message]: - """Returns an :term:`asynchronous iterator` that enables receiving the destination's message history. - - You must have :attr:`~discord.Permissions.read_message_history` to do this. - - Examples - --------- - - Usage :: - - counter = 0 - async for message in channel.history(limit=200): - if message.author == client.user: - counter += 1 - - Flattening into a list: :: - - messages = [message async for message in channel.history(limit=123)] - # messages is now a list of Message... - - All parameters are optional. - - Parameters - ----------- - limit: Optional[:class:`int`] - The number of messages to retrieve. - If ``None``, retrieves every message in the channel. Note, however, - that this would make it a slow operation. - before: Optional[Union[:class:`~discord.abc.Snowflake`, :class:`datetime.datetime`]] - Retrieve messages before this date or message. - If a datetime is provided, it is recommended to use a UTC aware datetime. - If the datetime is naive, it is assumed to be local time. - after: Optional[Union[:class:`~discord.abc.Snowflake`, :class:`datetime.datetime`]] - Retrieve messages after this date or message. - If a datetime is provided, it is recommended to use a UTC aware datetime. - If the datetime is naive, it is assumed to be local time. - around: Optional[Union[:class:`~discord.abc.Snowflake`, :class:`datetime.datetime`]] - Retrieve messages around this date or message. - If a datetime is provided, it is recommended to use a UTC aware datetime. - If the datetime is naive, it is assumed to be local time. - When using this argument, the maximum limit is 101. Note that if the limit is an - even number then this will return at most limit + 1 messages. - oldest_first: Optional[:class:`bool`] - If set to ``True``, return messages in oldest->newest order. Defaults to ``True`` if - ``after`` is specified, otherwise ``False``. - - Raises - ------ - ~discord.Forbidden - You do not have permissions to get channel message history. - ~discord.HTTPException - The request to get message history failed. - - Yields - ------- - :class:`~discord.Message` - The message with the message data parsed. 
- """ - - async def _around_strategy(retrieve: int, around: Optional[Snowflake], limit: Optional[int]): - if not around: - return [], None, 0 - - around_id = around.id if around else None - data = await self._state.http.logs_from(channel.id, retrieve, around=around_id) - - return data, None, 0 - - async def _after_strategy(retrieve: int, after: Optional[Snowflake], limit: Optional[int]): - after_id = after.id if after else None - data = await self._state.http.logs_from(channel.id, retrieve, after=after_id) - - if data: - if limit is not None: - limit -= len(data) - - after = Object(id=int(data[0]['id'])) - - return data, after, limit - - async def _before_strategy(retrieve: int, before: Optional[Snowflake], limit: Optional[int]): - before_id = before.id if before else None - data = await self._state.http.logs_from(channel.id, retrieve, before=before_id) - - if data: - if limit is not None: - limit -= len(data) - - before = Object(id=int(data[-1]['id'])) - - return data, before, limit - - if isinstance(before, datetime): - before = Object(id=utils.time_snowflake(before, high=False)) - if isinstance(after, datetime): - after = Object(id=utils.time_snowflake(after, high=True)) - if isinstance(around, datetime): - around = Object(id=utils.time_snowflake(around)) - - if oldest_first is None: - reverse = after is not None - else: - reverse = oldest_first - - after = after or OLDEST_OBJECT - predicate = None - - if around: - if limit is None: - raise ValueError('history does not support around with limit=None') - if limit > 101: - raise ValueError("history max limit 101 when specifying around parameter") - - # Strange Discord quirk - limit = 100 if limit == 101 else limit - - strategy, state = _around_strategy, around - - if before and after: - predicate = lambda m: after.id < int(m['id']) < before.id - elif before: - predicate = lambda m: int(m['id']) < before.id - elif after: - predicate = lambda m: after.id < int(m['id']) - elif reverse: - strategy, state = _after_strategy, after - if before: - predicate = lambda m: int(m['id']) < before.id - else: - strategy, state = _before_strategy, before - if after and after != OLDEST_OBJECT: - predicate = lambda m: int(m['id']) > after.id - - channel = await self._get_channel() - - while True: - retrieve = 100 if limit is None else min(limit, 100) - if retrieve < 1: - return - - data, state, limit = await strategy(retrieve, state, limit) - - if reverse: - data = reversed(data) - if predicate: - data = filter(predicate, data) - - count = 0 - - for count, raw_message in enumerate(data, 1): - yield self._state.create_message(channel=channel, data=raw_message) - - if count < 100: - # There's no data left after this - break - - -class Connectable(Protocol): - """An ABC that details the common operations on a channel that can - connect to a voice server. - - The following implement this ABC: - - - :class:`~discord.VoiceChannel` - - :class:`~discord.StageChannel` - """ - - __slots__ = () - _state: ConnectionState - - def _get_voice_client_key(self) -> Tuple[int, str]: - raise NotImplementedError - - def _get_voice_state_pair(self) -> Tuple[int, int]: - raise NotImplementedError - - async def connect( - self, - *, - timeout: float = 60.0, - reconnect: bool = True, - cls: Callable[[Client, Connectable], T] = VoiceClient, - self_deaf: bool = False, - self_mute: bool = False, - ) -> T: - """|coro| - - Connects to voice and creates a :class:`~discord.VoiceClient` to establish - your connection to the voice server. 
- - This requires :attr:`~discord.Intents.voice_states`. - - Parameters - ----------- - timeout: :class:`float` - The timeout in seconds to wait for the voice endpoint. - reconnect: :class:`bool` - Whether the bot should automatically attempt - a reconnect if a part of the handshake fails - or the gateway goes down. - cls: Type[:class:`~discord.VoiceProtocol`] - A type that subclasses :class:`~discord.VoiceProtocol` to connect with. - Defaults to :class:`~discord.VoiceClient`. - self_mute: :class:`bool` - Indicates if the client should be self-muted. - - .. versionadded:: 2.0 - self_deaf: :class:`bool` - Indicates if the client should be self-deafened. - - .. versionadded:: 2.0 - - Raises - ------- - asyncio.TimeoutError - Could not connect to the voice channel in time. - ~discord.ClientException - You are already connected to a voice channel. - ~discord.opus.OpusNotLoaded - The opus library has not been loaded. - - Returns - -------- - :class:`~discord.VoiceProtocol` - A voice client that is fully connected to the voice server. - """ - - key_id, _ = self._get_voice_client_key() - state = self._state - - if state._get_voice_client(key_id): - raise ClientException('Already connected to a voice channel.') - - client = state._get_client() - voice: T = cls(client, self) - - if not isinstance(voice, VoiceProtocol): - raise TypeError('Type must meet VoiceProtocol abstract base class.') - - state._add_voice_client(key_id, voice) - - try: - await voice.connect(timeout=timeout, reconnect=reconnect, self_deaf=self_deaf, self_mute=self_mute) - except asyncio.TimeoutError: - try: - await voice.disconnect(force=True) - except Exception: - # we don't care if disconnect failed because connection failed - pass - raise # re-raise - - return voice diff --git a/.venv/Lib/site-packages/discord/activity.py b/.venv/Lib/site-packages/discord/activity.py deleted file mode 100644 index 534d12a..0000000 --- a/.venv/Lib/site-packages/discord/activity.py +++ /dev/null @@ -1,842 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations - -import datetime -from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union, overload - -from .asset import Asset -from .enums import ActivityType, try_enum -from .colour import Colour -from .partial_emoji import PartialEmoji -from .utils import _get_as_snowflake - -__all__ = ( - 'BaseActivity', - 'Activity', - 'Streaming', - 'Game', - 'Spotify', - 'CustomActivity', -) - -"""If curious, this is the current schema for an activity. - -It's fairly long so I will document it here: - -All keys are optional. - -state: str (max: 128), -details: str (max: 128) -timestamps: dict - start: int (min: 1) - end: int (min: 1) -assets: dict - large_image: str (max: 32) - large_text: str (max: 128) - small_image: str (max: 32) - small_text: str (max: 128) -party: dict - id: str (max: 128), - size: List[int] (max-length: 2) - elem: int (min: 1) -secrets: dict - match: str (max: 128) - join: str (max: 128) - spectate: str (max: 128) -instance: bool -application_id: str -name: str (max: 128) -url: str -type: int -sync_id: str -session_id: str -flags: int -buttons: list[str (max: 32)] - -There are also activity flags which are mostly uninteresting for the library atm. - -t.ActivityFlags = { - INSTANCE: 1, - JOIN: 2, - SPECTATE: 4, - JOIN_REQUEST: 8, - SYNC: 16, - PLAY: 32 -} -""" - -if TYPE_CHECKING: - from .types.activity import ( - Activity as ActivityPayload, - ActivityTimestamps, - ActivityParty, - ActivityAssets, - ) - - from .state import ConnectionState - - -class BaseActivity: - """The base activity that all user-settable activities inherit from. - A user-settable activity is one that can be used in :meth:`Client.change_presence`. - - The following types currently count as user-settable: - - - :class:`Activity` - - :class:`Game` - - :class:`Streaming` - - :class:`CustomActivity` - - Note that although these types are considered user-settable by the library, - Discord typically ignores certain combinations of activity depending on - what is currently set. This behaviour may change in the future so there are - no guarantees on whether Discord will actually let you set these types. - - .. versionadded:: 1.3 - """ - - __slots__ = ('_created_at',) - - def __init__(self, **kwargs: Any) -> None: - self._created_at: Optional[float] = kwargs.pop('created_at', None) - - @property - def created_at(self) -> Optional[datetime.datetime]: - """Optional[:class:`datetime.datetime`]: When the user started doing this activity in UTC. - - .. versionadded:: 1.3 - """ - if self._created_at is not None: - return datetime.datetime.fromtimestamp(self._created_at / 1000, tz=datetime.timezone.utc) - - def to_dict(self) -> ActivityPayload: - raise NotImplementedError - - -class Activity(BaseActivity): - """Represents an activity in Discord. - - This could be an activity such as streaming, playing, listening - or watching. - - For memory optimisation purposes, some activities are offered in slimmed - down versions: - - - :class:`Game` - - :class:`Streaming` - - Attributes - ------------ - application_id: Optional[:class:`int`] - The application ID of the game. - name: Optional[:class:`str`] - The name of the activity. - url: Optional[:class:`str`] - A stream URL that the activity could be doing. - type: :class:`ActivityType` - The type of activity currently being done. - state: Optional[:class:`str`] - The user's current state. For example, "In Game". - details: Optional[:class:`str`] - The detail of the user's current activity. 
- timestamps: :class:`dict` - A dictionary of timestamps. It contains the following optional keys: - - - ``start``: Corresponds to when the user started doing the - activity in milliseconds since Unix epoch. - - ``end``: Corresponds to when the user will finish doing the - activity in milliseconds since Unix epoch. - - assets: :class:`dict` - A dictionary representing the images and their hover text of an activity. - It contains the following optional keys: - - - ``large_image``: A string representing the ID for the large image asset. - - ``large_text``: A string representing the text when hovering over the large image asset. - - ``small_image``: A string representing the ID for the small image asset. - - ``small_text``: A string representing the text when hovering over the small image asset. - - party: :class:`dict` - A dictionary representing the activity party. It contains the following optional keys: - - - ``id``: A string representing the party ID. - - ``size``: A list of up to two integer elements denoting (current_size, maximum_size). - buttons: List[:class:`str`] - A list of strings representing the labels of custom buttons shown in a rich presence. - - .. versionadded:: 2.0 - - emoji: Optional[:class:`PartialEmoji`] - The emoji that belongs to this activity. - """ - - __slots__ = ( - 'state', - 'details', - 'timestamps', - 'assets', - 'party', - 'flags', - 'sync_id', - 'session_id', - 'type', - 'name', - 'url', - 'application_id', - 'emoji', - 'buttons', - ) - - def __init__(self, **kwargs: Any) -> None: - super().__init__(**kwargs) - self.state: Optional[str] = kwargs.pop('state', None) - self.details: Optional[str] = kwargs.pop('details', None) - self.timestamps: ActivityTimestamps = kwargs.pop('timestamps', {}) - self.assets: ActivityAssets = kwargs.pop('assets', {}) - self.party: ActivityParty = kwargs.pop('party', {}) - self.application_id: Optional[int] = _get_as_snowflake(kwargs, 'application_id') - self.name: Optional[str] = kwargs.pop('name', None) - self.url: Optional[str] = kwargs.pop('url', None) - self.flags: int = kwargs.pop('flags', 0) - self.sync_id: Optional[str] = kwargs.pop('sync_id', None) - self.session_id: Optional[str] = kwargs.pop('session_id', None) - self.buttons: List[str] = kwargs.pop('buttons', []) - - activity_type = kwargs.pop('type', -1) - self.type: ActivityType = ( - activity_type if isinstance(activity_type, ActivityType) else try_enum(ActivityType, activity_type) - ) - - emoji = kwargs.pop('emoji', None) - self.emoji: Optional[PartialEmoji] = PartialEmoji.from_dict(emoji) if emoji is not None else None - - def __repr__(self) -> str: - attrs = ( - ('type', self.type), - ('name', self.name), - ('url', self.url), - ('details', self.details), - ('application_id', self.application_id), - ('session_id', self.session_id), - ('emoji', self.emoji), - ) - inner = ' '.join('%s=%r' % t for t in attrs) - return f'' - - def to_dict(self) -> Dict[str, Any]: - ret: Dict[str, Any] = {} - for attr in self.__slots__: - value = getattr(self, attr, None) - if value is None: - continue - - if isinstance(value, dict) and len(value) == 0: - continue - - ret[attr] = value - ret['type'] = int(self.type) - if self.emoji: - ret['emoji'] = self.emoji.to_dict() - return ret - - @property - def start(self) -> Optional[datetime.datetime]: - """Optional[:class:`datetime.datetime`]: When the user started doing this activity in UTC, if applicable.""" - try: - timestamp = self.timestamps['start'] / 1000 - except KeyError: - return None - else: - return 
datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) - - @property - def end(self) -> Optional[datetime.datetime]: - """Optional[:class:`datetime.datetime`]: When the user will stop doing this activity in UTC, if applicable.""" - try: - timestamp = self.timestamps['end'] / 1000 - except KeyError: - return None - else: - return datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) - - @property - def large_image_url(self) -> Optional[str]: - """Optional[:class:`str`]: Returns a URL pointing to the large image asset of this activity, if applicable.""" - try: - large_image = self.assets['large_image'] - except KeyError: - return None - else: - return self._image_url(large_image) - - @property - def small_image_url(self) -> Optional[str]: - """Optional[:class:`str`]: Returns a URL pointing to the small image asset of this activity, if applicable.""" - try: - small_image = self.assets['small_image'] - except KeyError: - return None - else: - return self._image_url(small_image) - - def _image_url(self, image: str) -> Optional[str]: - if image.startswith('mp:'): - return f'https://media.discordapp.net/{image[3:]}' - elif self.application_id is not None: - return Asset.BASE + f'/app-assets/{self.application_id}/{image}.png' - - @property - def large_image_text(self) -> Optional[str]: - """Optional[:class:`str`]: Returns the large image asset hover text of this activity, if applicable.""" - return self.assets.get('large_text', None) - - @property - def small_image_text(self) -> Optional[str]: - """Optional[:class:`str`]: Returns the small image asset hover text of this activity, if applicable.""" - return self.assets.get('small_text', None) - - -class Game(BaseActivity): - """A slimmed down version of :class:`Activity` that represents a Discord game. - - This is typically displayed via **Playing** on the official Discord client. - - .. container:: operations - - .. describe:: x == y - - Checks if two games are equal. - - .. describe:: x != y - - Checks if two games are not equal. - - .. describe:: hash(x) - - Returns the game's hash. - - .. describe:: str(x) - - Returns the game's name. - - Parameters - ----------- - name: :class:`str` - The game's name. - - Attributes - ----------- - name: :class:`str` - The game's name. - """ - - __slots__ = ('name', '_end', '_start') - - def __init__(self, name: str, **extra: Any) -> None: - super().__init__(**extra) - self.name: str = name - - try: - timestamps: ActivityTimestamps = extra['timestamps'] - except KeyError: - self._start = 0 - self._end = 0 - else: - self._start = timestamps.get('start', 0) - self._end = timestamps.get('end', 0) - - @property - def type(self) -> ActivityType: - """:class:`ActivityType`: Returns the game's type. This is for compatibility with :class:`Activity`. - - It always returns :attr:`ActivityType.playing`. 
- """ - return ActivityType.playing - - @property - def start(self) -> Optional[datetime.datetime]: - """Optional[:class:`datetime.datetime`]: When the user started playing this game in UTC, if applicable.""" - if self._start: - return datetime.datetime.fromtimestamp(self._start / 1000, tz=datetime.timezone.utc) - return None - - @property - def end(self) -> Optional[datetime.datetime]: - """Optional[:class:`datetime.datetime`]: When the user will stop playing this game in UTC, if applicable.""" - if self._end: - return datetime.datetime.fromtimestamp(self._end / 1000, tz=datetime.timezone.utc) - return None - - def __str__(self) -> str: - return str(self.name) - - def __repr__(self) -> str: - return f'' - - def to_dict(self) -> Dict[str, Any]: - timestamps: Dict[str, Any] = {} - if self._start: - timestamps['start'] = self._start - - if self._end: - timestamps['end'] = self._end - - return { - 'type': ActivityType.playing.value, - 'name': str(self.name), - 'timestamps': timestamps, - } - - def __eq__(self, other: object) -> bool: - return isinstance(other, Game) and other.name == self.name - - def __ne__(self, other: object) -> bool: - return not self.__eq__(other) - - def __hash__(self) -> int: - return hash(self.name) - - -class Streaming(BaseActivity): - """A slimmed down version of :class:`Activity` that represents a Discord streaming status. - - This is typically displayed via **Streaming** on the official Discord client. - - .. container:: operations - - .. describe:: x == y - - Checks if two streams are equal. - - .. describe:: x != y - - Checks if two streams are not equal. - - .. describe:: hash(x) - - Returns the stream's hash. - - .. describe:: str(x) - - Returns the stream's name. - - Attributes - ----------- - platform: Optional[:class:`str`] - Where the user is streaming from (ie. YouTube, Twitch). - - .. versionadded:: 1.3 - - name: Optional[:class:`str`] - The stream's name. - details: Optional[:class:`str`] - An alias for :attr:`name` - game: Optional[:class:`str`] - The game being streamed. - - .. versionadded:: 1.3 - - url: :class:`str` - The stream's URL. - assets: :class:`dict` - A dictionary comprising of similar keys than those in :attr:`Activity.assets`. - """ - - __slots__ = ('platform', 'name', 'game', 'url', 'details', 'assets') - - def __init__(self, *, name: Optional[str], url: str, **extra: Any) -> None: - super().__init__(**extra) - self.platform: Optional[str] = name - self.name: Optional[str] = extra.pop('details', name) - self.game: Optional[str] = extra.pop('state', None) - self.url: str = url - self.details: Optional[str] = extra.pop('details', self.name) # compatibility - self.assets: ActivityAssets = extra.pop('assets', {}) - - @property - def type(self) -> ActivityType: - """:class:`ActivityType`: Returns the game's type. This is for compatibility with :class:`Activity`. - - It always returns :attr:`ActivityType.streaming`. - """ - return ActivityType.streaming - - def __str__(self) -> str: - return str(self.name) - - def __repr__(self) -> str: - return f'' - - @property - def twitch_name(self) -> Optional[str]: - """Optional[:class:`str`]: If provided, the twitch name of the user streaming. - - This corresponds to the ``large_image`` key of the :attr:`Streaming.assets` - dictionary if it starts with ``twitch:``. Typically set by the Discord client. 
- """ - - try: - name = self.assets['large_image'] - except KeyError: - return None - else: - return name[7:] if name[:7] == 'twitch:' else None - - def to_dict(self) -> Dict[str, Any]: - ret: Dict[str, Any] = { - 'type': ActivityType.streaming.value, - 'name': str(self.name), - 'url': str(self.url), - 'assets': self.assets, - } - if self.details: - ret['details'] = self.details - return ret - - def __eq__(self, other: object) -> bool: - return isinstance(other, Streaming) and other.name == self.name and other.url == self.url - - def __ne__(self, other: object) -> bool: - return not self.__eq__(other) - - def __hash__(self) -> int: - return hash(self.name) - - -class Spotify: - """Represents a Spotify listening activity from Discord. This is a special case of - :class:`Activity` that makes it easier to work with the Spotify integration. - - .. container:: operations - - .. describe:: x == y - - Checks if two activities are equal. - - .. describe:: x != y - - Checks if two activities are not equal. - - .. describe:: hash(x) - - Returns the activity's hash. - - .. describe:: str(x) - - Returns the string 'Spotify'. - """ - - __slots__ = ('_state', '_details', '_timestamps', '_assets', '_party', '_sync_id', '_session_id', '_created_at') - - def __init__(self, **data: Any) -> None: - self._state: str = data.pop('state', '') - self._details: str = data.pop('details', '') - self._timestamps: ActivityTimestamps = data.pop('timestamps', {}) - self._assets: ActivityAssets = data.pop('assets', {}) - self._party: ActivityParty = data.pop('party', {}) - self._sync_id: str = data.pop('sync_id', '') - self._session_id: Optional[str] = data.pop('session_id') - self._created_at: Optional[float] = data.pop('created_at', None) - - @property - def type(self) -> ActivityType: - """:class:`ActivityType`: Returns the activity's type. This is for compatibility with :class:`Activity`. - - It always returns :attr:`ActivityType.listening`. - """ - return ActivityType.listening - - @property - def created_at(self) -> Optional[datetime.datetime]: - """Optional[:class:`datetime.datetime`]: When the user started listening in UTC. - - .. versionadded:: 1.3 - """ - if self._created_at is not None: - return datetime.datetime.fromtimestamp(self._created_at / 1000, tz=datetime.timezone.utc) - - @property - def colour(self) -> Colour: - """:class:`Colour`: Returns the Spotify integration colour, as a :class:`Colour`. - - There is an alias for this named :attr:`color`""" - return Colour(0x1DB954) - - @property - def color(self) -> Colour: - """:class:`Colour`: Returns the Spotify integration colour, as a :class:`Colour`. - - There is an alias for this named :attr:`colour`""" - return self.colour - - def to_dict(self) -> Dict[str, Any]: - return { - 'flags': 48, # SYNC | PLAY - 'name': 'Spotify', - 'assets': self._assets, - 'party': self._party, - 'sync_id': self._sync_id, - 'session_id': self._session_id, - 'timestamps': self._timestamps, - 'details': self._details, - 'state': self._state, - } - - @property - def name(self) -> str: - """:class:`str`: The activity's name. 
This will always return "Spotify".""" - return 'Spotify' - - def __eq__(self, other: object) -> bool: - return ( - isinstance(other, Spotify) - and other._session_id == self._session_id - and other._sync_id == self._sync_id - and other.start == self.start - ) - - def __ne__(self, other: object) -> bool: - return not self.__eq__(other) - - def __hash__(self) -> int: - return hash(self._session_id) - - def __str__(self) -> str: - return 'Spotify' - - def __repr__(self) -> str: - return f'' - - @property - def title(self) -> str: - """:class:`str`: The title of the song being played.""" - return self._details - - @property - def artists(self) -> List[str]: - """List[:class:`str`]: The artists of the song being played.""" - return self._state.split('; ') - - @property - def artist(self) -> str: - """:class:`str`: The artist of the song being played. - - This does not attempt to split the artist information into - multiple artists. Useful if there's only a single artist. - """ - return self._state - - @property - def album(self) -> str: - """:class:`str`: The album that the song being played belongs to.""" - return self._assets.get('large_text', '') - - @property - def album_cover_url(self) -> str: - """:class:`str`: The album cover image URL from Spotify's CDN.""" - large_image = self._assets.get('large_image', '') - if large_image[:8] != 'spotify:': - return '' - album_image_id = large_image[8:] - return 'https://i.scdn.co/image/' + album_image_id - - @property - def track_id(self) -> str: - """:class:`str`: The track ID used by Spotify to identify this song.""" - return self._sync_id - - @property - def track_url(self) -> str: - """:class:`str`: The track URL to listen on Spotify. - - .. versionadded:: 2.0 - """ - return f'https://open.spotify.com/track/{self.track_id}' - - @property - def start(self) -> datetime.datetime: - """:class:`datetime.datetime`: When the user started playing this song in UTC.""" - # the start key will be present here - return datetime.datetime.fromtimestamp(self._timestamps['start'] / 1000, tz=datetime.timezone.utc) # type: ignore - - @property - def end(self) -> datetime.datetime: - """:class:`datetime.datetime`: When the user will stop playing this song in UTC.""" - # the end key will be present here - return datetime.datetime.fromtimestamp(self._timestamps['end'] / 1000, tz=datetime.timezone.utc) # type: ignore - - @property - def duration(self) -> datetime.timedelta: - """:class:`datetime.timedelta`: The duration of the song being played.""" - return self.end - self.start - - @property - def party_id(self) -> str: - """:class:`str`: The party ID of the listening party.""" - return self._party.get('id', '') - - -class CustomActivity(BaseActivity): - """Represents a custom activity from Discord. - - .. container:: operations - - .. describe:: x == y - - Checks if two activities are equal. - - .. describe:: x != y - - Checks if two activities are not equal. - - .. describe:: hash(x) - - Returns the activity's hash. - - .. describe:: str(x) - - Returns the custom status text. - - .. versionadded:: 1.3 - - Attributes - ----------- - name: Optional[:class:`str`] - The custom activity's name. - emoji: Optional[:class:`PartialEmoji`] - The emoji to pass to the activity, if any. 
- """ - - __slots__ = ('name', 'emoji', 'state') - - def __init__(self, name: Optional[str], *, emoji: Optional[PartialEmoji] = None, **extra: Any) -> None: - super().__init__(**extra) - self.name: Optional[str] = name - self.state: Optional[str] = extra.pop('state', name) - if self.name == 'Custom Status': - self.name = self.state - - self.emoji: Optional[PartialEmoji] - if emoji is None: - self.emoji = emoji - elif isinstance(emoji, dict): - self.emoji = PartialEmoji.from_dict(emoji) - elif isinstance(emoji, str): - self.emoji = PartialEmoji(name=emoji) - elif isinstance(emoji, PartialEmoji): - self.emoji = emoji - else: - raise TypeError(f'Expected str, PartialEmoji, or None, received {type(emoji)!r} instead.') - - @property - def type(self) -> ActivityType: - """:class:`ActivityType`: Returns the activity's type. This is for compatibility with :class:`Activity`. - - It always returns :attr:`ActivityType.custom`. - """ - return ActivityType.custom - - def to_dict(self) -> Dict[str, Any]: - if self.name == self.state: - o = { - 'type': ActivityType.custom.value, - 'state': self.name, - 'name': 'Custom Status', - } - else: - o = { - 'type': ActivityType.custom.value, - 'name': self.name, - } - - if self.emoji: - o['emoji'] = self.emoji.to_dict() - return o - - def __eq__(self, other: object) -> bool: - return isinstance(other, CustomActivity) and other.name == self.name and other.emoji == self.emoji - - def __ne__(self, other: object) -> bool: - return not self.__eq__(other) - - def __hash__(self) -> int: - return hash((self.name, str(self.emoji))) - - def __str__(self) -> str: - if self.emoji: - if self.name: - return f'{self.emoji} {self.name}' - return str(self.emoji) - else: - return str(self.name) - - def __repr__(self) -> str: - return f'' - - -ActivityTypes = Union[Activity, Game, CustomActivity, Streaming, Spotify] - - -@overload -def create_activity(data: ActivityPayload, state: ConnectionState) -> ActivityTypes: - ... - - -@overload -def create_activity(data: None, state: ConnectionState) -> None: - ... - - -def create_activity(data: Optional[ActivityPayload], state: ConnectionState) -> Optional[ActivityTypes]: - if not data: - return None - - game_type = try_enum(ActivityType, data.get('type', -1)) - if game_type is ActivityType.playing: - if 'application_id' in data or 'session_id' in data: - return Activity(**data) - return Game(**data) - elif game_type is ActivityType.custom: - try: - name = data.pop('name') # type: ignore - except KeyError: - ret = Activity(**data) - else: - # we removed the name key from data already - ret = CustomActivity(name=name, **data) # type: ignore - elif game_type is ActivityType.streaming: - if 'url' in data: - # the url won't be None here - return Streaming(**data) # type: ignore - return Activity(**data) - elif game_type is ActivityType.listening and 'sync_id' in data and 'session_id' in data: - return Spotify(**data) - else: - ret = Activity(**data) - - if isinstance(ret.emoji, PartialEmoji): - ret.emoji._state = state - return ret diff --git a/.venv/Lib/site-packages/discord/app_commands/__init__.py b/.venv/Lib/site-packages/discord/app_commands/__init__.py deleted file mode 100644 index 9714617..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -discord.app_commands -~~~~~~~~~~~~~~~~~~~~~ - -Application commands support for the Discord API - -:copyright: (c) 2015-present Rapptz -:license: MIT, see LICENSE for more details. 
- -""" - -from .commands import * -from .errors import * -from .models import * -from .tree import * -from .namespace import * -from .transformers import * -from .translator import * -from . import checks as checks -from .checks import Cooldown as Cooldown diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 62d5b22..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/checks.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/checks.cpython-311.pyc deleted file mode 100644 index 906f0c7..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/checks.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/commands.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/commands.cpython-311.pyc deleted file mode 100644 index 7064b83..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/commands.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/errors.cpython-311.pyc deleted file mode 100644 index fab1d1b..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/errors.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/models.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/models.cpython-311.pyc deleted file mode 100644 index 495de7c..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/models.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/namespace.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/namespace.cpython-311.pyc deleted file mode 100644 index 8103323..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/namespace.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/transformers.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/transformers.cpython-311.pyc deleted file mode 100644 index 9c09029..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/transformers.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/translator.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/translator.cpython-311.pyc deleted file mode 100644 index 197b97c..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/translator.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/__pycache__/tree.cpython-311.pyc b/.venv/Lib/site-packages/discord/app_commands/__pycache__/tree.cpython-311.pyc deleted file mode 100644 index fe96e7c..0000000 Binary files a/.venv/Lib/site-packages/discord/app_commands/__pycache__/tree.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/app_commands/checks.py b/.venv/Lib/site-packages/discord/app_commands/checks.py deleted file mode 100644 index f6c0948..0000000 --- 
a/.venv/Lib/site-packages/discord/app_commands/checks.py +++ /dev/null @@ -1,537 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import ( - Any, - Coroutine, - Dict, - Hashable, - Union, - Callable, - TypeVar, - Optional, - TYPE_CHECKING, -) - -import time - -from .commands import check -from .errors import ( - NoPrivateMessage, - MissingRole, - MissingAnyRole, - MissingPermissions, - BotMissingPermissions, - CommandOnCooldown, -) - -from ..user import User -from ..permissions import Permissions -from ..utils import get as utils_get, MISSING, maybe_coroutine - -T = TypeVar('T') - -if TYPE_CHECKING: - from typing_extensions import Self - from ..interactions import Interaction - - CooldownFunction = Union[ - Callable[[Interaction[Any]], Coroutine[Any, Any, T]], - Callable[[Interaction[Any]], T], - ] - -__all__ = ( - 'has_role', - 'has_any_role', - 'has_permissions', - 'bot_has_permissions', - 'cooldown', - 'dynamic_cooldown', -) - - -class Cooldown: - """Represents a cooldown for a command. - - .. versionadded:: 2.0 - - Attributes - ----------- - rate: :class:`float` - The total number of tokens available per :attr:`per` seconds. - per: :class:`float` - The length of the cooldown period in seconds. - """ - - __slots__ = ('rate', 'per', '_window', '_tokens', '_last') - - def __init__(self, rate: float, per: float) -> None: - self.rate: int = int(rate) - self.per: float = float(per) - self._window: float = 0.0 - self._tokens: int = self.rate - self._last: float = 0.0 - - def get_tokens(self, current: Optional[float] = None) -> int: - """Returns the number of available tokens before rate limiting is applied. - - Parameters - ------------ - current: Optional[:class:`float`] - The time in seconds since Unix epoch to calculate tokens at. - If not supplied then :func:`time.time()` is used. - - Returns - -------- - :class:`int` - The number of tokens available before the cooldown is to be applied. - """ - if not current: - current = time.time() - - # the calculated tokens should be non-negative - tokens = max(self._tokens, 0) - - if current > self._window + self.per: - tokens = self.rate - return tokens - - def get_retry_after(self, current: Optional[float] = None) -> float: - """Returns the time in seconds until the cooldown will be reset. - - Parameters - ------------- - current: Optional[:class:`float`] - The current time in seconds since Unix epoch. 
- If not supplied, then :func:`time.time()` is used. - - Returns - ------- - :class:`float` - The number of seconds to wait before this cooldown will be reset. - """ - current = current or time.time() - tokens = self.get_tokens(current) - - if tokens == 0: - return self.per - (current - self._window) - - return 0.0 - - def update_rate_limit(self, current: Optional[float] = None, *, tokens: int = 1) -> Optional[float]: - """Updates the cooldown rate limit. - - Parameters - ------------- - current: Optional[:class:`float`] - The time in seconds since Unix epoch to update the rate limit at. - If not supplied, then :func:`time.time()` is used. - tokens: :class:`int` - The amount of tokens to deduct from the rate limit. - - Returns - ------- - Optional[:class:`float`] - The retry-after time in seconds if rate limited. - """ - current = current or time.time() - self._last = current - - self._tokens = self.get_tokens(current) - - # first token used means that we start a new rate limit window - if self._tokens == self.rate: - self._window = current - - # decrement tokens by specified number - self._tokens -= tokens - - # check if we are rate limited and return retry-after - if self._tokens < 0: - return self.per - (current - self._window) - - def reset(self) -> None: - """Reset the cooldown to its initial state.""" - self._tokens = self.rate - self._last = 0.0 - - def copy(self) -> Self: - """Creates a copy of this cooldown. - - Returns - -------- - :class:`Cooldown` - A new instance of this cooldown. - """ - return Cooldown(self.rate, self.per) - - def __repr__(self) -> str: - return f'' - - -def has_role(item: Union[int, str], /) -> Callable[[T], T]: - """A :func:`~discord.app_commands.check` that is added that checks if the member invoking the - command has the role specified via the name or ID specified. - - If a string is specified, you must give the exact name of the role, including - caps and spelling. - - If an integer is specified, you must give the exact snowflake ID of the role. - - This check raises one of two special exceptions, :exc:`~discord.app_commands.MissingRole` - if the user is missing a role, or :exc:`~discord.app_commands.NoPrivateMessage` if - it is used in a private message. Both inherit from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - .. note:: - - This is different from the permission system that Discord provides for application - commands. This is done entirely locally in the program rather than being handled - by Discord. - - Parameters - ----------- - item: Union[:class:`int`, :class:`str`] - The name or ID of the role to check. - """ - - def predicate(interaction: Interaction) -> bool: - if isinstance(interaction.user, User): - raise NoPrivateMessage() - - if isinstance(item, int): - role = interaction.user.get_role(item) - else: - role = utils_get(interaction.user.roles, name=item) - - if role is None: - raise MissingRole(item) - return True - - return check(predicate) - - -def has_any_role(*items: Union[int, str]) -> Callable[[T], T]: - r"""A :func:`~discord.app_commands.check` that is added that checks if the member - invoking the command has **any** of the roles specified. This means that if they have - one out of the three roles specified, then this check will return ``True``. - - Similar to :func:`has_role`\, the names or IDs passed in must be exact. 
- - This check raises one of two special exceptions, :exc:`~discord.app_commands.MissingAnyRole` - if the user is missing all roles, or :exc:`~discord.app_commands.NoPrivateMessage` if - it is used in a private message. Both inherit from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - .. note:: - - This is different from the permission system that Discord provides for application - commands. This is done entirely locally in the program rather than being handled - by Discord. - - Parameters - ----------- - items: List[Union[:class:`str`, :class:`int`]] - An argument list of names or IDs to check that the member has roles wise. - - Example - -------- - - .. code-block:: python3 - - @tree.command() - @app_commands.checks.has_any_role('Library Devs', 'Moderators', 492212595072434186) - async def cool(interaction: discord.Interaction): - await interaction.response.send_message('You are cool indeed') - """ - - def predicate(interaction: Interaction) -> bool: - if isinstance(interaction.user, User): - raise NoPrivateMessage() - - if any( - interaction.user.get_role(item) is not None - if isinstance(item, int) - else utils_get(interaction.user.roles, name=item) is not None - for item in items - ): - return True - raise MissingAnyRole(list(items)) - - return check(predicate) - - -def has_permissions(**perms: bool) -> Callable[[T], T]: - r"""A :func:`~discord.app_commands.check` that is added that checks if the member - has all of the permissions necessary. - - Note that this check operates on the permissions given by - :attr:`discord.Interaction.permissions`. - - The permissions passed in must be exactly like the properties shown under - :class:`discord.Permissions`. - - This check raises a special exception, :exc:`~discord.app_commands.MissingPermissions` - that is inherited from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - .. note:: - - This is different from the permission system that Discord provides for application - commands. This is done entirely locally in the program rather than being handled - by Discord. - - Parameters - ------------ - \*\*perms: :class:`bool` - Keyword arguments denoting the permissions to check for. - - Example - --------- - - .. code-block:: python3 - - @tree.command() - @app_commands.checks.has_permissions(manage_messages=True) - async def test(interaction: discord.Interaction): - await interaction.response.send_message('You can manage messages.') - - """ - - invalid = perms.keys() - Permissions.VALID_FLAGS.keys() - if invalid: - raise TypeError(f"Invalid permission(s): {', '.join(invalid)}") - - def predicate(interaction: Interaction) -> bool: - permissions = interaction.permissions - - missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value] - - if not missing: - return True - - raise MissingPermissions(missing) - - return check(predicate) - - -def bot_has_permissions(**perms: bool) -> Callable[[T], T]: - """Similar to :func:`has_permissions` except checks if the bot itself has - the permissions listed. This relies on :attr:`discord.Interaction.app_permissions`. - - This check raises a special exception, :exc:`~discord.app_commands.BotMissingPermissions` - that is inherited from :exc:`~discord.app_commands.CheckFailure`. - - .. 
versionadded:: 2.0 - """ - - invalid = set(perms) - set(Permissions.VALID_FLAGS) - if invalid: - raise TypeError(f"Invalid permission(s): {', '.join(invalid)}") - - def predicate(interaction: Interaction) -> bool: - permissions = interaction.app_permissions - missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value] - - if not missing: - return True - - raise BotMissingPermissions(missing) - - return check(predicate) - - -def _create_cooldown_decorator( - key: CooldownFunction[Hashable], factory: CooldownFunction[Optional[Cooldown]] -) -> Callable[[T], T]: - - mapping: Dict[Any, Cooldown] = {} - - async def get_bucket( - interaction: Interaction, - *, - mapping: Dict[Any, Cooldown] = mapping, - key: CooldownFunction[Hashable] = key, - factory: CooldownFunction[Optional[Cooldown]] = factory, - ) -> Optional[Cooldown]: - current = interaction.created_at.timestamp() - dead_keys = [k for k, v in mapping.items() if current > v._last + v.per] - for k in dead_keys: - del mapping[k] - - k = await maybe_coroutine(key, interaction) - if k not in mapping: - bucket: Optional[Cooldown] = await maybe_coroutine(factory, interaction) - if bucket is not None: - mapping[k] = bucket - else: - bucket = mapping[k] - - return bucket - - async def predicate(interaction: Interaction) -> bool: - bucket = await get_bucket(interaction) - if bucket is None: - return True - - retry_after = bucket.update_rate_limit(interaction.created_at.timestamp()) - if retry_after is None: - return True - - raise CommandOnCooldown(bucket, retry_after) - - return check(predicate) - - -def cooldown( - rate: float, - per: float, - *, - key: Optional[CooldownFunction[Hashable]] = MISSING, -) -> Callable[[T], T]: - """A decorator that adds a cooldown to a command. - - A cooldown allows a command to only be used a specific amount - of times in a specific time frame. These cooldowns are based off - of the ``key`` function provided. If a ``key`` is not provided - then it defaults to a user-level cooldown. The ``key`` function - must take a single parameter, the :class:`discord.Interaction` and - return a value that is used as a key to the internal cooldown mapping. - - The ``key`` function can optionally be a coroutine. - - If a cooldown is triggered, then :exc:`~discord.app_commands.CommandOnCooldown` is - raised to the error handlers. - - Examples - --------- - - Setting a one per 5 seconds per member cooldown on a command: - - .. code-block:: python3 - - @tree.command() - @app_commands.checks.cooldown(1, 5.0, key=lambda i: (i.guild_id, i.user.id)) - async def test(interaction: discord.Interaction): - await interaction.response.send_message('Hello') - - @test.error - async def on_test_error(interaction: discord.Interaction, error: app_commands.AppCommandError): - if isinstance(error, app_commands.CommandOnCooldown): - await interaction.response.send_message(str(error), ephemeral=True) - - Parameters - ------------ - rate: :class:`int` - The number of times a command can be used before triggering a cooldown. - per: :class:`float` - The amount of seconds to wait for a cooldown when it's been triggered. - key: Optional[Callable[[:class:`discord.Interaction`], :class:`collections.abc.Hashable`]] - A function that returns a key to the mapping denoting the type of cooldown. - Can optionally be a coroutine. If not given then defaults to a user-level - cooldown. If ``None`` is passed then it is interpreted as a "global" cooldown. 
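For instance, passing ``key=None`` as described above yields a single shared bucket for every invocation; a sketch in the same style as the example above, with ``tree`` assumed to exist:

@tree.command()
@app_commands.checks.cooldown(1, 30.0, key=None)  # one use per 30 seconds across all users and guilds
async def announce(interaction: discord.Interaction):
    await interaction.response.send_message('Announced!')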
- """ - - if key is MISSING: - key_func = lambda interaction: interaction.user.id - elif key is None: - key_func = lambda i: None - else: - key_func = key - - factory = lambda interaction: Cooldown(rate, per) - - return _create_cooldown_decorator(key_func, factory) - - -def dynamic_cooldown( - factory: CooldownFunction[Optional[Cooldown]], - *, - key: Optional[CooldownFunction[Hashable]] = MISSING, -) -> Callable[[T], T]: - """A decorator that adds a dynamic cooldown to a command. - - A cooldown allows a command to only be used a specific amount - of times in a specific time frame. These cooldowns are based off - of the ``key`` function provided. If a ``key`` is not provided - then it defaults to a user-level cooldown. The ``key`` function - must take a single parameter, the :class:`discord.Interaction` and - return a value that is used as a key to the internal cooldown mapping. - - If a ``factory`` function is given, it must be a function that - accepts a single parameter of type :class:`discord.Interaction` and must - return a :class:`~discord.app_commands.Cooldown` or ``None``. - If ``None`` is returned then that cooldown is effectively bypassed. - - Both ``key`` and ``factory`` can optionally be coroutines. - - If a cooldown is triggered, then :exc:`~discord.app_commands.CommandOnCooldown` is - raised to the error handlers. - - Examples - --------- - - Setting a cooldown for everyone but the owner. - - .. code-block:: python3 - - def cooldown_for_everyone_but_me(interaction: discord.Interaction) -> Optional[app_commands.Cooldown]: - if interaction.user.id == 80088516616269824: - return None - return app_commands.Cooldown(1, 10.0) - - @tree.command() - @app_commands.checks.dynamic_cooldown(cooldown_for_everyone_but_me) - async def test(interaction: discord.Interaction): - await interaction.response.send_message('Hello') - - @test.error - async def on_test_error(interaction: discord.Interaction, error: app_commands.AppCommandError): - if isinstance(error, app_commands.CommandOnCooldown): - await interaction.response.send_message(str(error), ephemeral=True) - - Parameters - ------------ - factory: Optional[Callable[[:class:`discord.Interaction`], Optional[:class:`~discord.app_commands.Cooldown`]]] - A function that takes an interaction and returns a cooldown that will apply to that interaction - or ``None`` if the interaction should not have a cooldown. - key: Optional[Callable[[:class:`discord.Interaction`], :class:`collections.abc.Hashable`]] - A function that returns a key to the mapping denoting the type of cooldown. - Can optionally be a coroutine. If not given then defaults to a user-level - cooldown. If ``None`` is passed then it is interpreted as a "global" cooldown. 
- """ - - if key is MISSING: - key_func = lambda interaction: interaction.user.id - elif key is None: - key_func = lambda i: None - else: - key_func = key - - return _create_cooldown_decorator(key_func, factory) diff --git a/.venv/Lib/site-packages/discord/app_commands/commands.py b/.venv/Lib/site-packages/discord/app_commands/commands.py deleted file mode 100644 index 8e66934..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/commands.py +++ /dev/null @@ -1,2479 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -import inspect - -from typing import ( - Any, - Callable, - ClassVar, - Coroutine, - Dict, - Generator, - Generic, - List, - MutableMapping, - Optional, - Set, - TYPE_CHECKING, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -import re -from copy import copy as shallow_copy - -from ..enums import AppCommandOptionType, AppCommandType, ChannelType, Locale -from .models import Choice -from .transformers import annotation_to_parameter, CommandParameter, NoneType -from .errors import AppCommandError, CheckFailure, CommandInvokeError, CommandSignatureMismatch, CommandAlreadyRegistered -from .translator import TranslationContextLocation, TranslationContext, Translator, locale_str -from ..message import Message -from ..user import User -from ..member import Member -from ..permissions import Permissions -from ..utils import resolve_annotation, MISSING, is_inside_class, maybe_coroutine, async_all, _shorten, _to_kebab_case - -if TYPE_CHECKING: - from typing_extensions import ParamSpec, Concatenate - from ..interactions import Interaction - from ..abc import Snowflake - from .namespace import Namespace - from .models import ChoiceT - - # Generally, these two libraries are supposed to be separate from each other. 
- # However, for type hinting purposes it's unfortunately necessary for one to - # reference the other to prevent type checking errors in callbacks - from discord.ext import commands - - ErrorFunc = Callable[[Interaction, AppCommandError], Coroutine[Any, Any, None]] - -__all__ = ( - 'Command', - 'ContextMenu', - 'Group', - 'Parameter', - 'context_menu', - 'command', - 'describe', - 'check', - 'rename', - 'choices', - 'autocomplete', - 'guilds', - 'guild_only', - 'default_permissions', -) - -if TYPE_CHECKING: - P = ParamSpec('P') -else: - P = TypeVar('P') - -T = TypeVar('T') -F = TypeVar('F', bound=Callable[..., Any]) -GroupT = TypeVar('GroupT', bound='Binding') -Coro = Coroutine[Any, Any, T] -UnboundError = Callable[['Interaction[Any]', AppCommandError], Coro[Any]] -Error = Union[ - Callable[[GroupT, 'Interaction[Any]', AppCommandError], Coro[Any]], - UnboundError, -] -Check = Callable[['Interaction[Any]'], Union[bool, Coro[bool]]] -Binding = Union['Group', 'commands.Cog'] - - -if TYPE_CHECKING: - CommandCallback = Union[ - Callable[Concatenate[GroupT, 'Interaction[Any]', P], Coro[T]], - Callable[Concatenate['Interaction[Any]', P], Coro[T]], - ] - - ContextMenuCallback = Union[ - # If groups end up support context menus these would be uncommented - # Callable[[GroupT, 'Interaction', Member], Coro[Any]], - # Callable[[GroupT, 'Interaction', User], Coro[Any]], - # Callable[[GroupT, 'Interaction', Message], Coro[Any]], - # Callable[[GroupT, 'Interaction', Union[Member, User]], Coro[Any]], - Callable[['Interaction[Any]', Member], Coro[Any]], - Callable[['Interaction[Any]', User], Coro[Any]], - Callable[['Interaction[Any]', Message], Coro[Any]], - Callable[['Interaction[Any]', Union[Member, User]], Coro[Any]], - ] - - AutocompleteCallback = Union[ - Callable[[GroupT, 'Interaction[Any]', str], Coro[List[Choice[ChoiceT]]]], - Callable[['Interaction[Any]', str], Coro[List[Choice[ChoiceT]]]], - ] -else: - CommandCallback = Callable[..., Coro[T]] - ContextMenuCallback = Callable[..., Coro[T]] - AutocompleteCallback = Callable[..., Coro[T]] - - -CheckInputParameter = Union['Command[Any, ..., Any]', 'ContextMenu', 'CommandCallback[Any, ..., Any]', ContextMenuCallback] - -# The re module doesn't support \p{} so we have to list characters from Thai and Devanagari manually. 
-THAI_COMBINING = r'\u0e31-\u0e3a\u0e47-\u0e4e' -DEVANAGARI_COMBINING = r'\u0900-\u0903\u093a\u093b\u093c\u093e\u093f\u0940-\u094f\u0955\u0956\u0957\u0962\u0963' -VALID_SLASH_COMMAND_NAME = re.compile(r'^[-_\w' + THAI_COMBINING + DEVANAGARI_COMBINING + r']{1,32}$') - -ARG_NAME_SUBREGEX = r'(?:\\?\*){0,2}(?P\w+)' - -ARG_DESCRIPTION_SUBREGEX = r'(?P(?:.|\n)+?(?:\Z|\r?\n(?=[\S\r\n])))' - -ARG_TYPE_SUBREGEX = r'(?:.+)' - -GOOGLE_DOCSTRING_ARG_REGEX = re.compile( - rf'^{ARG_NAME_SUBREGEX}[ \t]*(?:\({ARG_TYPE_SUBREGEX}\))?[ \t]*:[ \t]*{ARG_DESCRIPTION_SUBREGEX}', - re.MULTILINE, -) - -SPHINX_DOCSTRING_ARG_REGEX = re.compile( - rf'^:param {ARG_NAME_SUBREGEX}:[ \t]+{ARG_DESCRIPTION_SUBREGEX}', - re.MULTILINE, -) - -NUMPY_DOCSTRING_ARG_REGEX = re.compile( - rf'^{ARG_NAME_SUBREGEX}(?:[ \t]*:)?(?:[ \t]+{ARG_TYPE_SUBREGEX})?[ \t]*\r?\n[ \t]+{ARG_DESCRIPTION_SUBREGEX}', - re.MULTILINE, -) - - -def _parse_args_from_docstring(func: Callable[..., Any], params: Dict[str, CommandParameter]) -> Dict[str, str]: - docstring = inspect.getdoc(func) - - if docstring is None: - return {} - - # Extract the arguments - # Note: These are loose regexes, but they are good enough for our purposes - # For Google-style, look only at the lines that are indented - section_lines = inspect.cleandoc('\n'.join(line for line in docstring.splitlines() if line.startswith(' '))) - docstring_styles = ( - GOOGLE_DOCSTRING_ARG_REGEX.finditer(section_lines), - SPHINX_DOCSTRING_ARG_REGEX.finditer(docstring), - NUMPY_DOCSTRING_ARG_REGEX.finditer(docstring), - ) - - return { - m.group('name'): m.group('description') for matches in docstring_styles for m in matches if m.group('name') in params - } - - -def validate_name(name: str) -> str: - match = VALID_SLASH_COMMAND_NAME.match(name) - if match is None: - raise ValueError( - f'{name!r} must be between 1-32 characters and contain only lower-case letters, numbers, hyphens, or underscores.' - ) - - # Ideally, name.islower() would work instead but since certain characters - # are Lo (e.g. CJK) those don't pass the test. I'd use `casefold` instead as - # well, but chances are the server-side check is probably something similar to - # this code anyway. 
- if name.lower() != name: - raise ValueError(f'{name!r} must be all lower-case') - return name - - -def validate_context_menu_name(name: str) -> str: - if not name or len(name) > 32: - raise ValueError('context menu names must be between 1-32 characters') - return name - - -def validate_auto_complete_callback( - callback: AutocompleteCallback[GroupT, ChoiceT] -) -> AutocompleteCallback[GroupT, ChoiceT]: - # This function needs to ensure the following is true: - # If self.foo is passed then don't pass command.binding to the callback - # If Class.foo is passed then it is assumed command.binding has to be passed - # If free_function_foo is passed then no binding should be passed at all - # Passing command.binding is mandated by pass_command_binding - - binding = getattr(callback, '__self__', None) - pass_command_binding = binding is None and is_inside_class(callback) - - # 'method' objects can't have dynamic attributes - if binding is None: - callback.pass_command_binding = pass_command_binding - - required_parameters = 2 + pass_command_binding - params = inspect.signature(callback).parameters - if len(params) != required_parameters: - raise TypeError(f'autocomplete callback {callback.__qualname__!r} requires either 2 or 3 parameters to be passed') - - return callback - - -def _context_menu_annotation(annotation: Any, *, _none: type = NoneType) -> AppCommandType: - if annotation is Message: - return AppCommandType.message - - supported_types: Set[Any] = {Member, User} - if annotation in supported_types: - return AppCommandType.user - - # Check if there's an origin - origin = getattr(annotation, '__origin__', None) - if origin is not Union: - # Only Union is supported so bail early - msg = ( - f'unsupported type annotation {annotation!r}, must be either discord.Member, ' - 'discord.User, discord.Message, or a typing.Union of discord.Member and discord.User' - ) - raise TypeError(msg) - - # Only Union[Member, User] is supported - if not all(arg in supported_types for arg in annotation.__args__): - raise TypeError(f'unsupported types given inside {annotation!r}') - - return AppCommandType.user - - -def _populate_descriptions(params: Dict[str, CommandParameter], descriptions: Dict[str, Any]) -> None: - for name, param in params.items(): - description = descriptions.pop(name, MISSING) - if description is MISSING: - param.description = '…' - continue - - if not isinstance(description, (str, locale_str)): - raise TypeError('description must be a string') - - if isinstance(description, str): - param.description = _shorten(description) - else: - param.description = description - - if descriptions: - first = next(iter(descriptions)) - raise TypeError(f'unknown parameter given: {first}') - - -def _populate_renames(params: Dict[str, CommandParameter], renames: Dict[str, Union[str, locale_str]]) -> None: - rename_map: Dict[str, Union[str, locale_str]] = {} - - # original name to renamed name - - for name in params.keys(): - new_name = renames.pop(name, MISSING) - - if new_name is MISSING: - rename_map[name] = name - continue - - if name in rename_map: - raise ValueError(f'{new_name} is already used') - - if isinstance(new_name, str): - new_name = validate_name(new_name) - else: - validate_name(new_name.message) - - rename_map[name] = new_name - params[name]._rename = new_name - - if renames: - first = next(iter(renames)) - raise ValueError(f'unknown parameter given: {first}') - - -def _populate_choices(params: Dict[str, CommandParameter], all_choices: Dict[str, List[Choice]]) -> None: - for name, param 
in params.items(): - choices = all_choices.pop(name, MISSING) - if choices is MISSING: - continue - - if not isinstance(choices, list): - raise TypeError('choices must be a list of Choice') - - if not all(isinstance(choice, Choice) for choice in choices): - raise TypeError('choices must be a list of Choice') - - if param.type not in (AppCommandOptionType.string, AppCommandOptionType.number, AppCommandOptionType.integer): - raise TypeError('choices are only supported for integer, string, or number option types') - - if not all(param.type == choice._option_type for choice in choices): - raise TypeError('choices must all have the same inner option type as the parameter choice type') - - param.choices = choices - - if all_choices: - first = next(iter(all_choices)) - raise TypeError(f'unknown parameter given: {first}') - - -def _populate_autocomplete(params: Dict[str, CommandParameter], autocomplete: Dict[str, Any]) -> None: - for name, param in params.items(): - callback = autocomplete.pop(name, MISSING) - if callback is MISSING: - continue - - if not inspect.iscoroutinefunction(callback): - raise TypeError('autocomplete callback must be a coroutine function') - - if param.type not in (AppCommandOptionType.string, AppCommandOptionType.number, AppCommandOptionType.integer): - raise TypeError('autocomplete is only supported for integer, string, or number option types') - - if param.is_choice_annotation(): - raise TypeError( - 'Choice annotation unsupported for autocomplete parameters, consider using a regular annotation instead' - ) - - param.autocomplete = validate_auto_complete_callback(callback) - - if autocomplete: - first = next(iter(autocomplete)) - raise TypeError(f'unknown parameter given: {first}') - - -def _extract_parameters_from_callback(func: Callable[..., Any], globalns: Dict[str, Any]) -> Dict[str, CommandParameter]: - params = inspect.signature(func).parameters - cache = {} - required_params = is_inside_class(func) + 1 - if len(params) < required_params: - raise TypeError(f'callback {func.__qualname__!r} must have more than {required_params - 1} parameter(s)') - - iterator = iter(params.values()) - for _ in range(0, required_params): - next(iterator) - - parameters: List[CommandParameter] = [] - for parameter in iterator: - if parameter.annotation is parameter.empty: - raise TypeError(f'parameter {parameter.name!r} is missing a type annotation in callback {func.__qualname__!r}') - - resolved = resolve_annotation(parameter.annotation, globalns, globalns, cache) - param = annotation_to_parameter(resolved, parameter) - parameters.append(param) - - values = sorted(parameters, key=lambda a: a.required, reverse=True) - result = {v.name: v for v in values} - - descriptions = _parse_args_from_docstring(func, result) - - try: - descriptions.update(func.__discord_app_commands_param_description__) - except AttributeError: - for param in values: - if param.description is MISSING: - param.description = '…' - if descriptions: - _populate_descriptions(result, descriptions) - - try: - renames = func.__discord_app_commands_param_rename__ - except AttributeError: - pass - else: - _populate_renames(result, renames.copy()) - - try: - choices = func.__discord_app_commands_param_choices__ - except AttributeError: - pass - else: - _populate_choices(result, choices.copy()) - - try: - autocomplete = func.__discord_app_commands_param_autocomplete__ - except AttributeError: - pass - else: - _populate_autocomplete(result, autocomplete.copy()) - - return result - - -def _get_context_menu_parameter(func: 
ContextMenuCallback) -> Tuple[str, Any, AppCommandType]: - params = inspect.signature(func).parameters - if is_inside_class(func) and not hasattr(func, '__self__'): - raise TypeError('context menus cannot be defined inside a class') - - if len(params) != 2: - msg = ( - f'context menu callback {func.__qualname__!r} requires 2 parameters, ' - 'the first one being the interaction and the other one explicitly ' - 'annotated with either discord.Message, discord.User, discord.Member, ' - 'or a typing.Union of discord.Member and discord.User' - ) - raise TypeError(msg) - - iterator = iter(params.values()) - next(iterator) # skip interaction - parameter = next(iterator) - if parameter.annotation is parameter.empty: - msg = ( - f'second parameter of context menu callback {func.__qualname__!r} must be explicitly ' - 'annotated with either discord.Message, discord.User, discord.Member, or ' - 'a typing.Union of discord.Member and discord.User' - ) - raise TypeError(msg) - - resolved = resolve_annotation(parameter.annotation, func.__globals__, func.__globals__, {}) - type = _context_menu_annotation(resolved) - return (parameter.name, resolved, type) - - -def mark_overrideable(func: F) -> F: - func.__discord_app_commands_base_function__ = None - return func - - -class Parameter: - """A class that contains the parameter information of a :class:`Command` callback. - - .. versionadded:: 2.0 - - Attributes - ----------- - name: :class:`str` - The name of the parameter. This is the Python identifier for the parameter. - display_name: :class:`str` - The displayed name of the parameter on Discord. - description: :class:`str` - The description of the parameter. - autocomplete: :class:`bool` - Whether the parameter has an autocomplete handler. - locale_name: Optional[:class:`locale_str`] - The display name's locale string, if available. - locale_description: Optional[:class:`locale_str`] - The description's locale string, if available. - required: :class:`bool` - Whether the parameter is required - choices: List[:class:`~discord.app_commands.Choice`] - A list of choices this parameter takes, if any. - type: :class:`~discord.AppCommandOptionType` - The underlying type of this parameter. - channel_types: List[:class:`~discord.ChannelType`] - The channel types that are allowed for this parameter. - min_value: Optional[Union[:class:`int`, :class:`float`]] - The minimum supported value for this parameter. - max_value: Optional[Union[:class:`int`, :class:`float`]] - The maximum supported value for this parameter. - default: Any - The default value of the parameter, if given. - If not given then this is :data:`~discord.utils.MISSING`. - command: :class:`Command` - The command this parameter is attached to. 
- """ - - def __init__(self, parent: CommandParameter, command: Command[Any, ..., Any]) -> None: - self.__parent: CommandParameter = parent - self.__command: Command[Any, ..., Any] = command - - @property - def command(self) -> Command[Any, ..., Any]: - return self.__command - - @property - def name(self) -> str: - return self.__parent.name - - @property - def display_name(self) -> str: - return self.__parent.display_name - - @property - def required(self) -> bool: - return self.__parent.required - - @property - def description(self) -> str: - return str(self.__parent.description) - - @property - def locale_name(self) -> Optional[locale_str]: - if isinstance(self.__parent._rename, locale_str): - return self.__parent._rename - return None - - @property - def locale_description(self) -> Optional[locale_str]: - if isinstance(self.__parent.description, locale_str): - return self.__parent.description - return None - - @property - def autocomplete(self) -> bool: - return self.__parent.autocomplete is not None - - @property - def default(self) -> Any: - return self.__parent.default - - @property - def type(self) -> AppCommandOptionType: - return self.__parent.type - - @property - def choices(self) -> List[Choice[Union[int, float, str]]]: - choices = self.__parent.choices - if choices is MISSING: - return [] - return choices.copy() - - @property - def channel_types(self) -> List[ChannelType]: - channel_types = self.__parent.channel_types - if channel_types is MISSING: - return [] - return channel_types.copy() - - @property - def min_value(self) -> Optional[Union[int, float]]: - return self.__parent.min_value - - @property - def max_value(self) -> Optional[Union[int, float]]: - return self.__parent.max_value - - -class Command(Generic[GroupT, P, T]): - """A class that implements an application command. - - These are usually not created manually, instead they are created using - one of the following decorators: - - - :func:`~discord.app_commands.command` - - :meth:`Group.command ` - - :meth:`CommandTree.command ` - - .. versionadded:: 2.0 - - Parameters - ----------- - name: Union[:class:`str`, :class:`locale_str`] - The name of the application command. - description: Union[:class:`str`, :class:`locale_str`] - The description of the application command. This shows up in the UI to describe - the application command. - callback: :ref:`coroutine ` - The coroutine that is executed when the command is called. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. - Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. - parent: Optional[:class:`Group`] - The parent application command. ``None`` if there isn't one. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - - Attributes - ------------ - name: :class:`str` - The name of the application command. - description: :class:`str` - The description of the application command. This shows up in the UI to describe - the application command. 
- checks - A list of predicates that take a :class:`~discord.Interaction` parameter - to indicate whether the command callback should be executed. If an exception - is necessary to be thrown to signal failure, then one inherited from - :exc:`AppCommandError` should be used. If all the checks fail without - propagating an exception, :exc:`CheckFailure` is raised. - default_permissions: Optional[:class:`~discord.Permissions`] - The default permissions that can execute this command on Discord. Note - that server administrators can override this value in the client. - Setting an empty permissions field will disallow anyone except server - administrators from using the command in a guild. - - Due to a Discord limitation, this does not work on subcommands. - guild_only: :class:`bool` - Whether the command should only be usable in guild contexts. - - Due to a Discord limitation, this does not work on subcommands. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. - - Due to a Discord limitation, this does not work on subcommands. - parent: Optional[:class:`Group`] - The parent application command. ``None`` if there isn't one. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - """ - - def __init__( - self, - *, - name: Union[str, locale_str], - description: Union[str, locale_str], - callback: CommandCallback[GroupT, P, T], - nsfw: bool = False, - parent: Optional[Group] = None, - guild_ids: Optional[List[int]] = None, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, - ): - name, locale = (name.message, name) if isinstance(name, locale_str) else (name, None) - self.name: str = validate_name(name) - self._locale_name: Optional[locale_str] = locale - description, locale = ( - (description.message, description) if isinstance(description, locale_str) else (description, None) - ) - self.description: str = description - self._locale_description: Optional[locale_str] = locale - self._attr: Optional[str] = None - self._callback: CommandCallback[GroupT, P, T] = callback - self.parent: Optional[Group] = parent - self.binding: Optional[GroupT] = None - self.on_error: Optional[Error[GroupT]] = None - self.module: Optional[str] = callback.__module__ - - # Unwrap __self__ for bound methods - try: - self.binding = callback.__self__ - self._callback = callback = callback.__func__ - except AttributeError: - pass - - self._params: Dict[str, CommandParameter] = _extract_parameters_from_callback(callback, callback.__globals__) - self.checks: List[Check] = getattr(callback, '__discord_app_commands_checks__', []) - self._guild_ids: Optional[List[int]] = guild_ids or getattr( - callback, '__discord_app_commands_default_guilds__', None - ) - self.default_permissions: Optional[Permissions] = getattr( - callback, '__discord_app_commands_default_permissions__', None - ) - self.guild_only: bool = getattr(callback, '__discord_app_commands_guild_only__', False) - self.nsfw: bool = nsfw - self.extras: Dict[Any, Any] = extras or {} - - if self._guild_ids is not None and self.parent is not None: - raise ValueError('child commands cannot have default guilds set, consider setting them in the parent instead') - - if auto_locale_strings: - self._convert_to_locale_strings() - - def _convert_to_locale_strings(self) -> None: - if self._locale_name is None: - self._locale_name = locale_str(self.name) - if self._locale_description is None: - self._locale_description = 
locale_str(self.description) - - for param in self._params.values(): - param._convert_to_locale_strings() - - def __set_name__(self, owner: Type[Any], name: str) -> None: - self._attr = name - - @property - def callback(self) -> CommandCallback[GroupT, P, T]: - """:ref:`coroutine `: The coroutine that is executed when the command is called.""" - return self._callback - - def _copy_with( - self, - *, - parent: Optional[Group], - binding: GroupT, - bindings: MutableMapping[GroupT, GroupT] = MISSING, - set_on_binding: bool = True, - ) -> Command: - bindings = {} if bindings is MISSING else bindings - - copy = shallow_copy(self) - copy._params = self._params.copy() - copy.parent = parent - copy.binding = bindings.get(self.binding) if self.binding is not None else binding - - if copy._attr and set_on_binding: - setattr(copy.binding, copy._attr, copy) - - return copy - - async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]: - base = self.to_dict() - name_localizations: Dict[str, str] = {} - description_localizations: Dict[str, str] = {} - - # Prevent creating these objects in a heavy loop - name_context = TranslationContext(location=TranslationContextLocation.command_name, data=self) - description_context = TranslationContext(location=TranslationContextLocation.command_description, data=self) - - for locale in Locale: - if self._locale_name: - translation = await translator._checked_translate(self._locale_name, locale, name_context) - if translation is not None: - name_localizations[locale.value] = translation - - if self._locale_description: - translation = await translator._checked_translate(self._locale_description, locale, description_context) - if translation is not None: - description_localizations[locale.value] = translation - - base['name_localizations'] = name_localizations - base['description_localizations'] = description_localizations - base['options'] = [ - await param.get_translated_payload(translator, Parameter(param, self)) for param in self._params.values() - ] - return base - - def to_dict(self) -> Dict[str, Any]: - # If we have a parent then our type is a subcommand - # Otherwise, the type falls back to the specific command type (e.g. slash command or context menu) - option_type = AppCommandType.chat_input.value if self.parent is None else AppCommandOptionType.subcommand.value - base: Dict[str, Any] = { - 'name': self.name, - 'description': self.description, - 'type': option_type, - 'options': [param.to_dict() for param in self._params.values()], - } - - if self.parent is None: - base['nsfw'] = self.nsfw - base['dm_permission'] = not self.guild_only - base['default_member_permissions'] = None if self.default_permissions is None else self.default_permissions.value - - return base - - async def _invoke_error_handlers(self, interaction: Interaction, error: AppCommandError) -> None: - # These type ignores are because the type checker can't narrow this type properly. 
- if self.on_error is not None: - if self.binding is not None: - await self.on_error(self.binding, interaction, error) # type: ignore - else: - await self.on_error(interaction, error) # type: ignore - - parent = self.parent - if parent is not None: - await parent.on_error(interaction, error) - - if parent.parent is not None: - await parent.parent.on_error(interaction, error) - - binding_error_handler = getattr(self.binding, '__discord_app_commands_error_handler__', None) - if binding_error_handler is not None: - await binding_error_handler(interaction, error) - - def _has_any_error_handlers(self) -> bool: - if self.on_error is not None: - return True - - parent = self.parent - if parent is not None: - # Check if the on_error is overridden - if not hasattr(parent.on_error, '__discord_app_commands_base_function__'): - return True - - if parent.parent is not None: - if not hasattr(parent.parent.on_error, '__discord_app_commands_base_function__'): - return True - - # Check if we have a bound error handler - if getattr(self.binding, '__discord_app_commands_error_handler__', None) is not None: - return True - - return False - - async def _transform_arguments(self, interaction: Interaction, namespace: Namespace) -> Dict[str, Any]: - values = namespace.__dict__ - transformed_values = {} - - for param in self._params.values(): - try: - value = values[param.display_name] - except KeyError: - if not param.required: - transformed_values[param.name] = param.default - else: - raise CommandSignatureMismatch(self) from None - else: - transformed_values[param.name] = await param.transform(interaction, value) - - return transformed_values - - async def _do_call(self, interaction: Interaction, params: Dict[str, Any]) -> T: - # These type ignores are because the type checker doesn't quite understand the narrowing here - # Likewise, it thinks we're missing positional arguments when there aren't any. - try: - if self.binding is not None: - return await self._callback(self.binding, interaction, **params) # type: ignore - return await self._callback(interaction, **params) # type: ignore - except TypeError as e: - # In order to detect mismatch from the provided signature and the Discord data, - # there are many ways it can go wrong yet all of them eventually lead to a TypeError - # from the Python compiler showcasing that the signature is incorrect. This lovely - # piece of code essentially checks the last frame of the caller and checks if the - # locals contains our `self` reference. - # - # This is because there is a possibility that a TypeError is raised within the body - # of the function, and in that case the locals wouldn't contain a reference to - # the command object under the name `self`. 
- frame = inspect.trace()[-1].frame - if frame.f_locals.get('self') is self: - raise CommandSignatureMismatch(self) from None - raise CommandInvokeError(self, e) from e - except AppCommandError: - raise - except Exception as e: - raise CommandInvokeError(self, e) from e - - async def _invoke_with_namespace(self, interaction: Interaction, namespace: Namespace) -> T: - if not await self._check_can_run(interaction): - raise CheckFailure(f'The check functions for command {self.name!r} failed.') - - transformed_values = await self._transform_arguments(interaction, namespace) - return await self._do_call(interaction, transformed_values) - - async def _invoke_autocomplete(self, interaction: Interaction, name: str, namespace: Namespace): - # The namespace contains the Discord provided names so this will be fine - # even if the name is renamed - value = namespace.__dict__[name] - - try: - param = self._params[name] - except KeyError: - # Slow case, it might be a rename - params = {param.display_name: param for param in self._params.values()} - try: - param = params[name] - except KeyError: - raise CommandSignatureMismatch(self) from None - - if param.autocomplete is None: - raise CommandSignatureMismatch(self) - - predicates = getattr(param.autocomplete, '__discord_app_commands_checks__', []) - if predicates: - try: - passed = await async_all(f(interaction) for f in predicates) - except Exception: - passed = False - - if not passed: - if not interaction.response.is_done(): - await interaction.response.autocomplete([]) - return - - if getattr(param.autocomplete, 'pass_command_binding', False): - binding = self.binding - if binding is not None: - choices = await param.autocomplete(binding, interaction, value) - else: - raise TypeError('autocomplete parameter expected a bound self parameter but one was not provided') - else: - choices = await param.autocomplete(interaction, value) - - if interaction.response.is_done(): - return - - await interaction.response.autocomplete(choices) - - def _get_internal_command(self, name: str) -> Optional[Union[Command, Group]]: - return None - - @property - def parameters(self) -> List[Parameter]: - """Returns a list of parameters for this command. - - This does not include the ``self`` or ``interaction`` parameters. - - Returns - -------- - List[:class:`Parameter`] - The parameters of this command. - """ - return [Parameter(p, self) for p in self._params.values()] - - def get_parameter(self, name: str) -> Optional[Parameter]: - """Retrieves a parameter by its name. - - The name must be the Python identifier rather than the renamed - one for display on Discord. - - Parameters - ----------- - name: :class:`str` - The parameter name in the callback function. - - Returns - -------- - Optional[:class:`Parameter`] - The parameter or ``None`` if not found. - """ - - parent = self._params.get(name) - if parent is not None: - return Parameter(parent, self) - return None - - @property - def root_parent(self) -> Optional[Group]: - """Optional[:class:`Group`]: The root parent of this command.""" - if self.parent is None: - return None - parent = self.parent - return parent.parent or parent - - @property - def qualified_name(self) -> str: - """:class:`str`: Returns the fully qualified command name. - - The qualified name includes the parent name as well. For example, - in a command like ``/foo bar`` the qualified name is ``foo bar``. 
- """ - # A B C - # ^ self - # ^ parent - # ^ grandparent - if self.parent is None: - return self.name - - names = [self.name, self.parent.name] - grandparent = self.parent.parent - if grandparent is not None: - names.append(grandparent.name) - - return ' '.join(reversed(names)) - - async def _check_can_run(self, interaction: Interaction) -> bool: - if self.parent is not None and self.parent is not self.binding: - # For commands with a parent which isn't the binding, i.e. - # - # - # - # The parent check needs to be called first - if not await maybe_coroutine(self.parent.interaction_check, interaction): - return False - - if self.binding is not None: - check: Optional[Check] = getattr(self.binding, 'interaction_check', None) - if check: - ret = await maybe_coroutine(check, interaction) # type: ignore # Probable pyright bug - if not ret: - return False - - predicates = self.checks - if not predicates: - return True - - return await async_all(f(interaction) for f in predicates) - - def error(self, coro: Error[GroupT]) -> Error[GroupT]: - """A decorator that registers a coroutine as a local error handler. - - The local error handler is called whenever an exception is raised in the body - of the command or during handling of the command. The error handler must take - 2 parameters, the interaction and the error. - - The error passed will be derived from :exc:`AppCommandError`. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the local error handler. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. - """ - - if not inspect.iscoroutinefunction(coro): - raise TypeError('The error handler must be a coroutine.') - - self.on_error = coro - return coro - - def autocomplete( - self, name: str - ) -> Callable[[AutocompleteCallback[GroupT, ChoiceT]], AutocompleteCallback[GroupT, ChoiceT]]: - """A decorator that registers a coroutine as an autocomplete prompt for a parameter. - - The coroutine callback must have 2 parameters, the :class:`~discord.Interaction`, - and the current value by the user (the string currently being typed by the user). - - To get the values from other parameters that may be filled in, accessing - :attr:`.Interaction.namespace` will give a :class:`Namespace` object with those - values. - - Parent :func:`checks ` are ignored within an autocomplete. However, checks can be added - to the autocomplete callback and the ones added will be called. If the checks fail for any reason - then an empty list is sent as the interaction response. - - The coroutine decorator **must** return a list of :class:`~discord.app_commands.Choice` objects. - Only up to 25 objects are supported. - - .. warning:: - The choices returned from this coroutine are suggestions. The user may ignore them and input their own value. - - Example: - - .. code-block:: python3 - - @app_commands.command() - async def fruits(interaction: discord.Interaction, fruit: str): - await interaction.response.send_message(f'Your favourite fruit seems to be {fruit}') - - @fruits.autocomplete('fruit') - async def fruits_autocomplete( - interaction: discord.Interaction, - current: str, - ) -> List[app_commands.Choice[str]]: - fruits = ['Banana', 'Pineapple', 'Apple', 'Watermelon', 'Melon', 'Cherry'] - return [ - app_commands.Choice(name=fruit, value=fruit) - for fruit in fruits if current.lower() in fruit.lower() - ] - - - Parameters - ----------- - name: :class:`str` - The parameter name to register as autocomplete. 
- - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine or - the parameter is not found or of an invalid type. - """ - - def decorator(coro: AutocompleteCallback[GroupT, ChoiceT]) -> AutocompleteCallback[GroupT, ChoiceT]: - if not inspect.iscoroutinefunction(coro): - raise TypeError('The error handler must be a coroutine.') - - try: - param = self._params[name] - except KeyError: - raise TypeError(f'unknown parameter: {name!r}') from None - - if param.type not in (AppCommandOptionType.string, AppCommandOptionType.number, AppCommandOptionType.integer): - raise TypeError('autocomplete is only supported for integer, string, or number option types') - - if param.is_choice_annotation(): - raise TypeError( - 'Choice annotation unsupported for autocomplete parameters, consider using a regular annotation instead' - ) - - param.autocomplete = validate_auto_complete_callback(coro) - return coro - - return decorator - - def add_check(self, func: Check, /) -> None: - """Adds a check to the command. - - This is the non-decorator interface to :func:`check`. - - Parameters - ----------- - func - The function that will be used as a check. - """ - - self.checks.append(func) - - def remove_check(self, func: Check, /) -> None: - """Removes a check from the command. - - This function is idempotent and will not raise an exception - if the function is not in the command's checks. - - Parameters - ----------- - func - The function to remove from the checks. - """ - - try: - self.checks.remove(func) - except ValueError: - pass - - -class ContextMenu: - """A class that implements a context menu application command. - - These are usually not created manually, instead they are created using - one of the following decorators: - - - :func:`~discord.app_commands.context_menu` - - :meth:`CommandTree.context_menu ` - - .. versionadded:: 2.0 - - Parameters - ----------- - name: Union[:class:`str`, :class:`locale_str`] - The name of the context menu. - callback: :ref:`coroutine ` - The coroutine that is executed when the command is called. - type: :class:`.AppCommandType` - The type of context menu application command. By default, this is inferred - by the parameter of the callback. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. - Defaults to ``False``. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - - Attributes - ------------ - name: :class:`str` - The name of the context menu. - type: :class:`.AppCommandType` - The type of context menu application command. By default, this is inferred - by the parameter of the callback. - default_permissions: Optional[:class:`~discord.Permissions`] - The default permissions that can execute this command on Discord. Note - that server administrators can override this value in the client. - Setting an empty permissions field will disallow anyone except server - administrators from using the command in a guild. - guild_only: :class:`bool` - Whether the command should only be usable in guild contexts. - Defaults to ``False``. 
- nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. - Defaults to ``False``. - checks - A list of predicates that take a :class:`~discord.Interaction` parameter - to indicate whether the command callback should be executed. If an exception - is necessary to be thrown to signal failure, then one inherited from - :exc:`AppCommandError` should be used. If all the checks fail without - propagating an exception, :exc:`CheckFailure` is raised. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - """ - - def __init__( - self, - *, - name: Union[str, locale_str], - callback: ContextMenuCallback, - type: AppCommandType = MISSING, - nsfw: bool = False, - guild_ids: Optional[List[int]] = None, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, - ): - name, locale = (name.message, name) if isinstance(name, locale_str) else (name, None) - self.name: str = validate_context_menu_name(name) - self._locale_name: Optional[locale_str] = locale - self._callback: ContextMenuCallback = callback - (param, annotation, actual_type) = _get_context_menu_parameter(callback) - if type is MISSING: - type = actual_type - - if actual_type != type: - raise ValueError(f'context menu callback implies a type of {actual_type} but {type} was passed.') - - self.type: AppCommandType = type - self._param_name = param - self._annotation = annotation - self.module: Optional[str] = callback.__module__ - self._guild_ids = guild_ids or getattr(callback, '__discord_app_commands_default_guilds__', None) - self.on_error: Optional[UnboundError] = None - self.default_permissions: Optional[Permissions] = getattr( - callback, '__discord_app_commands_default_permissions__', None - ) - self.nsfw: bool = nsfw - self.guild_only: bool = getattr(callback, '__discord_app_commands_guild_only__', False) - self.checks: List[Check] = getattr(callback, '__discord_app_commands_checks__', []) - self.extras: Dict[Any, Any] = extras or {} - - if auto_locale_strings: - if self._locale_name is None: - self._locale_name = locale_str(self.name) - - @property - def callback(self) -> ContextMenuCallback: - """:ref:`coroutine `: The coroutine that is executed when the context menu is called.""" - return self._callback - - @property - def qualified_name(self) -> str: - """:class:`str`: Returns the fully qualified command name.""" - return self.name - - async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]: - base = self.to_dict() - context = TranslationContext(location=TranslationContextLocation.command_name, data=self) - if self._locale_name: - name_localizations: Dict[str, str] = {} - for locale in Locale: - translation = await translator._checked_translate(self._locale_name, locale, context) - if translation is not None: - name_localizations[locale.value] = translation - - base['name_localizations'] = name_localizations - return base - - def to_dict(self) -> Dict[str, Any]: - return { - 'name': self.name, - 'type': self.type.value, - 'dm_permission': not self.guild_only, - 'default_member_permissions': None if self.default_permissions is None else self.default_permissions.value, - 'nsfw': self.nsfw, - } - - async def _check_can_run(self, interaction: Interaction) -> bool: - predicates = self.checks - if not predicates: - return True - - return await async_all(f(interaction) for f in predicates) - - def _has_any_error_handlers(self) -> bool: - return self.on_error is not None 
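For reference, the ``ContextMenu`` parameters and the ``error``/``add_check`` hooks documented above are normally wired up through the decorator interface rather than by instantiating the class directly. A minimal sketch, assuming a client with an attached command tree (the names ``client``, ``tree``, and ``'Report Message'`` are illustrative only, and the tree still has to be synced separately before the menu shows up):

.. code-block:: python3

    import discord
    from discord import app_commands

    client = discord.Client(intents=discord.Intents.default())
    tree = app_commands.CommandTree(client)

    # CommandTree.context_menu builds a ContextMenu from the callback;
    # the second parameter's annotation (discord.Message here) determines
    # the menu type, as described in the ContextMenu documentation above.
    @tree.context_menu(name='Report Message')
    async def report_message(interaction: discord.Interaction, message: discord.Message):
        await interaction.response.send_message(
            f'Reported a message from {message.author}.', ephemeral=True
        )

    # Local error handler registered via ContextMenu.error.
    @report_message.error
    async def on_report_error(interaction: discord.Interaction, error: app_commands.AppCommandError):
        if not interaction.response.is_done():
            await interaction.response.send_message('Something went wrong.', ephemeral=True)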
- - async def _invoke(self, interaction: Interaction, arg: Any): - try: - if not await self._check_can_run(interaction): - raise CheckFailure(f'The check functions for context menu {self.name!r} failed.') - - await self._callback(interaction, arg) - except AppCommandError: - raise - except Exception as e: - raise CommandInvokeError(self, e) from e - - def error(self, coro: UnboundError) -> UnboundError: - """A decorator that registers a coroutine as a local error handler. - - The local error handler is called whenever an exception is raised in the body - of the command or during handling of the command. The error handler must take - 2 parameters, the interaction and the error. - - The error passed will be derived from :exc:`AppCommandError`. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the local error handler. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. - """ - - if not inspect.iscoroutinefunction(coro): - raise TypeError('The error handler must be a coroutine.') - - self.on_error = coro - return coro - - def add_check(self, func: Check, /) -> None: - """Adds a check to the command. - - This is the non-decorator interface to :func:`check`. - - Parameters - ----------- - func - The function that will be used as a check. - """ - - self.checks.append(func) - - def remove_check(self, func: Check, /) -> None: - """Removes a check from the command. - - This function is idempotent and will not raise an exception - if the function is not in the command's checks. - - Parameters - ----------- - func - The function to remove from the checks. - """ - - try: - self.checks.remove(func) - except ValueError: - pass - - -class Group: - """A class that implements an application command group. - - These are usually inherited rather than created manually. - - Decorators such as :func:`guild_only`, :func:`guilds`, and :func:`default_permissions` - will apply to the group if used on top of a subclass. For example: - - .. code-block:: python3 - - from discord import app_commands - - @app_commands.guild_only() - class MyGroup(app_commands.Group): - pass - - .. versionadded:: 2.0 - - Parameters - ----------- - name: Union[:class:`str`, :class:`locale_str`] - The name of the group. If not given, it defaults to a lower-case - kebab-case version of the class name. - description: Union[:class:`str`, :class:`locale_str`] - The description of the group. This shows up in the UI to describe - the group. If not given, it defaults to the docstring of the - class shortened to 100 characters. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - default_permissions: Optional[:class:`~discord.Permissions`] - The default permissions that can execute this group on Discord. Note - that server administrators can override this value in the client. - Setting an empty permissions field will disallow anyone except server - administrators from using the command in a guild. - - Due to a Discord limitation, this does not work on subcommands. - guild_only: :class:`bool` - Whether the group should only be usable in guild contexts. - Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. 
- nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. - Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. - parent: Optional[:class:`Group`] - The parent application command. ``None`` if there isn't one. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - - Attributes - ------------ - name: :class:`str` - The name of the group. - description: :class:`str` - The description of the group. This shows up in the UI to describe - the group. - default_permissions: Optional[:class:`~discord.Permissions`] - The default permissions that can execute this group on Discord. Note - that server administrators can override this value in the client. - Setting an empty permissions field will disallow anyone except server - administrators from using the command in a guild. - - Due to a Discord limitation, this does not work on subcommands. - guild_only: :class:`bool` - Whether the group should only be usable in guild contexts. - - Due to a Discord limitation, this does not work on subcommands. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. - - Due to a Discord limitation, this does not work on subcommands. - parent: Optional[:class:`Group`] - The parent group. ``None`` if there isn't one. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - """ - - __discord_app_commands_group_children__: ClassVar[List[Union[Command[Any, ..., Any], Group]]] = [] - __discord_app_commands_skip_init_binding__: bool = False - __discord_app_commands_group_name__: str = MISSING - __discord_app_commands_group_description__: str = MISSING - __discord_app_commands_group_locale_name__: Optional[locale_str] = None - __discord_app_commands_group_locale_description__: Optional[locale_str] = None - __discord_app_commands_group_nsfw__: bool = False - __discord_app_commands_guild_only__: bool = MISSING - __discord_app_commands_default_permissions__: Optional[Permissions] = MISSING - __discord_app_commands_has_module__: bool = False - __discord_app_commands_error_handler__: Optional[ - Callable[[Interaction, AppCommandError], Coroutine[Any, Any, None]] - ] = None - - def __init_subclass__( - cls, - *, - name: Union[str, locale_str] = MISSING, - description: Union[str, locale_str] = MISSING, - guild_only: bool = MISSING, - nsfw: bool = False, - default_permissions: Optional[Permissions] = MISSING, - ) -> None: - if not cls.__discord_app_commands_group_children__: - children: List[Union[Command[Any, ..., Any], Group]] = [ - member for member in cls.__dict__.values() if isinstance(member, (Group, Command)) and member.parent is None - ] - - cls.__discord_app_commands_group_children__ = children - - found = set() - for child in children: - if child.name in found: - raise TypeError(f'Command {child.name!r} is a duplicate') - found.add(child.name) - - if len(children) > 25: - raise TypeError('groups cannot have more than 25 commands') - - if name is MISSING: - cls.__discord_app_commands_group_name__ = validate_name(_to_kebab_case(cls.__name__)) - elif isinstance(name, str): - cls.__discord_app_commands_group_name__ = validate_name(name) - else: - cls.__discord_app_commands_group_name__ = validate_name(name.message) - cls.__discord_app_commands_group_locale_name__ = name - - if description is MISSING: - if cls.__doc__ is None: 
- cls.__discord_app_commands_group_description__ = '…' - else: - cls.__discord_app_commands_group_description__ = _shorten(cls.__doc__) - elif isinstance(description, str): - cls.__discord_app_commands_group_description__ = description - else: - cls.__discord_app_commands_group_description__ = description.message - cls.__discord_app_commands_group_locale_description__ = description - - if guild_only is not MISSING: - cls.__discord_app_commands_guild_only__ = guild_only - - if default_permissions is not MISSING: - cls.__discord_app_commands_default_permissions__ = default_permissions - - if cls.__module__ != __name__: - cls.__discord_app_commands_has_module__ = True - cls.__discord_app_commands_group_nsfw__ = nsfw - - def __init__( - self, - *, - name: Union[str, locale_str] = MISSING, - description: Union[str, locale_str] = MISSING, - parent: Optional[Group] = None, - guild_ids: Optional[List[int]] = None, - guild_only: bool = MISSING, - nsfw: bool = MISSING, - auto_locale_strings: bool = True, - default_permissions: Optional[Permissions] = MISSING, - extras: Dict[Any, Any] = MISSING, - ): - cls = self.__class__ - - if name is MISSING: - name, locale = cls.__discord_app_commands_group_name__, cls.__discord_app_commands_group_locale_name__ - elif isinstance(name, str): - name, locale = validate_name(name), None - else: - name, locale = validate_name(name.message), name - self.name: str = name - self._locale_name: Optional[locale_str] = locale - - if description is MISSING: - description, locale = ( - cls.__discord_app_commands_group_description__, - cls.__discord_app_commands_group_locale_description__, - ) - elif isinstance(description, str): - description, locale = description, None - else: - description, locale = description.message, description - self.description: str = description - self._locale_description: Optional[locale_str] = locale - - self._attr: Optional[str] = None - self._owner_cls: Optional[Type[Any]] = None - self._guild_ids: Optional[List[int]] = guild_ids or getattr(cls, '__discord_app_commands_default_guilds__', None) - - if default_permissions is MISSING: - if cls.__discord_app_commands_default_permissions__ is MISSING: - default_permissions = None - else: - default_permissions = cls.__discord_app_commands_default_permissions__ - - self.default_permissions: Optional[Permissions] = default_permissions - - if guild_only is MISSING: - if cls.__discord_app_commands_guild_only__ is MISSING: - guild_only = False - else: - guild_only = cls.__discord_app_commands_guild_only__ - - self.guild_only: bool = guild_only - - if nsfw is MISSING: - nsfw = cls.__discord_app_commands_group_nsfw__ - - self.nsfw: bool = nsfw - - if not self.description: - raise TypeError('groups must have a description') - - self.parent: Optional[Group] = parent - self.module: Optional[str] - if cls.__discord_app_commands_has_module__: - self.module = cls.__module__ - else: - try: - # This is pretty hacky - # It allows the module to be fetched if someone just constructs a bare Group object though. 
- self.module = inspect.currentframe().f_back.f_globals['__name__'] # type: ignore - except (AttributeError, IndexError, KeyError): - self.module = None - - self._children: Dict[str, Union[Command, Group]] = {} - self.extras: Dict[Any, Any] = extras or {} - - bindings: Dict[Group, Group] = {} - - for child in self.__discord_app_commands_group_children__: - # commands and groups created directly in this class (no parent) - copy = ( - child._copy_with(parent=self, binding=self, bindings=bindings, set_on_binding=False) - if not cls.__discord_app_commands_skip_init_binding__ - else child - ) - - self._children[copy.name] = copy - if copy._attr and not cls.__discord_app_commands_skip_init_binding__: - setattr(self, copy._attr, copy) - - if parent is not None: - if parent.parent is not None: - raise ValueError('groups can only be nested at most one level') - parent.add_command(self) - - if auto_locale_strings: - self._convert_to_locale_strings() - - def _convert_to_locale_strings(self) -> None: - if self._locale_name is None: - self._locale_name = locale_str(self.name) - if self._locale_description is None: - self._locale_description = locale_str(self.description) - - # I don't know if propagating to the children is the right behaviour here. - - def __set_name__(self, owner: Type[Any], name: str) -> None: - self._attr = name - self.module = owner.__module__ - self._owner_cls = owner - - def _copy_with( - self, - *, - parent: Optional[Group], - binding: Binding, - bindings: MutableMapping[Group, Group] = MISSING, - set_on_binding: bool = True, - ) -> Group: - bindings = {} if bindings is MISSING else bindings - - copy = shallow_copy(self) - copy.parent = parent - copy._children = {} - - bindings[self] = copy - - for child in self._children.values(): - child_copy = child._copy_with(parent=copy, binding=binding, bindings=bindings) - child_copy.parent = copy - copy._children[child_copy.name] = child_copy - - if isinstance(child_copy, Group) and child_copy._attr and set_on_binding: - if binding.__class__ is child_copy._owner_cls: - setattr(binding, child_copy._attr, child_copy) - elif child_copy._owner_cls is copy.__class__: - setattr(copy, child_copy._attr, child_copy) - - if copy._attr and set_on_binding: - setattr(parent or binding, copy._attr, copy) - - return copy - - async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]: - base = self.to_dict() - name_localizations: Dict[str, str] = {} - description_localizations: Dict[str, str] = {} - - # Prevent creating these objects in a heavy loop - name_context = TranslationContext(location=TranslationContextLocation.group_name, data=self) - description_context = TranslationContext(location=TranslationContextLocation.group_description, data=self) - for locale in Locale: - if self._locale_name: - translation = await translator._checked_translate(self._locale_name, locale, name_context) - if translation is not None: - name_localizations[locale.value] = translation - - if self._locale_description: - translation = await translator._checked_translate(self._locale_description, locale, description_context) - if translation is not None: - description_localizations[locale.value] = translation - - base['name_localizations'] = name_localizations - base['description_localizations'] = description_localizations - base['options'] = [await child.get_translated_payload(translator) for child in self._children.values()] - return base - - def to_dict(self) -> Dict[str, Any]: - # If this has a parent command then it's part of a subcommand group - # 
Otherwise, it's just a regular command - option_type = 1 if self.parent is None else AppCommandOptionType.subcommand_group.value - base: Dict[str, Any] = { - 'name': self.name, - 'description': self.description, - 'type': option_type, - 'options': [child.to_dict() for child in self._children.values()], - } - - if self.parent is None: - base['nsfw'] = self.nsfw - base['dm_permission'] = not self.guild_only - base['default_member_permissions'] = None if self.default_permissions is None else self.default_permissions.value - - return base - - @property - def root_parent(self) -> Optional[Group]: - """Optional[:class:`Group`]: The parent of this group.""" - return self.parent - - @property - def qualified_name(self) -> str: - """:class:`str`: Returns the fully qualified group name. - - The qualified name includes the parent name as well. For example, - in a group like ``/foo bar`` the qualified name is ``foo bar``. - """ - - if self.parent is None: - return self.name - return f'{self.parent.name} {self.name}' - - def _get_internal_command(self, name: str) -> Optional[Union[Command[Any, ..., Any], Group]]: - return self._children.get(name) - - @property - def commands(self) -> List[Union[Command[Any, ..., Any], Group]]: - """List[Union[:class:`Command`, :class:`Group`]]: The commands that this group contains.""" - return list(self._children.values()) - - def walk_commands(self) -> Generator[Union[Command[Any, ..., Any], Group], None, None]: - """An iterator that recursively walks through all commands that this group contains. - - Yields - --------- - Union[:class:`Command`, :class:`Group`] - The commands in this group. - """ - - for command in self._children.values(): - yield command - if isinstance(command, Group): - yield from command.walk_commands() - - @mark_overrideable - async def on_error(self, interaction: Interaction, error: AppCommandError, /) -> None: - """|coro| - - A callback that is called when a child's command raises an :exc:`AppCommandError`. - - To get the command that failed, :attr:`discord.Interaction.command` should be used. - - The default implementation does nothing. - - Parameters - ----------- - interaction: :class:`~discord.Interaction` - The interaction that is being handled. - error: :exc:`AppCommandError` - The exception that was raised. - """ - - pass - - def error(self, coro: ErrorFunc) -> ErrorFunc: - """A decorator that registers a coroutine as a local error handler. - - The local error handler is called whenever an exception is raised in a child command. - The error handler must take 2 parameters, the interaction and the error. - - The error passed will be derived from :exc:`AppCommandError`. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the local error handler. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine, or is an invalid coroutine. - """ - - if not inspect.iscoroutinefunction(coro): - raise TypeError('The error handler must be a coroutine.') - - params = inspect.signature(coro).parameters - if len(params) != 2: - raise TypeError('The error handler must have 2 parameters.') - - self.on_error = coro - return coro - - async def interaction_check(self, interaction: Interaction, /) -> bool: - """|coro| - - A callback that is called when an interaction happens within the group - that checks whether a command inside the group should be executed. - - This is useful to override if, for example, you want to ensure that the - interaction author is a given user. 
- - The default implementation of this returns ``True``. - - .. note:: - - If an exception occurs within the body then the check - is considered a failure and error handlers such as - :meth:`on_error` is called. See :exc:`AppCommandError` - for more information. - - Parameters - ----------- - interaction: :class:`~discord.Interaction` - The interaction that occurred. - - Returns - --------- - :class:`bool` - Whether the view children's callbacks should be called. - """ - - return True - - def add_command(self, command: Union[Command[Any, ..., Any], Group], /, *, override: bool = False) -> None: - """Adds a command or group to this group's internal list of commands. - - Parameters - ----------- - command: Union[:class:`Command`, :class:`Group`] - The command or group to add. - override: :class:`bool` - Whether to override a pre-existing command or group with the same name. - If ``False`` then an exception is raised. - - Raises - ------- - CommandAlreadyRegistered - The command or group is already registered. Note that the :attr:`CommandAlreadyRegistered.guild_id` - attribute will always be ``None`` in this case. - ValueError - There are too many commands already registered or the group is too - deeply nested. - TypeError - The wrong command type was passed. - """ - - if not isinstance(command, (Command, Group)): - raise TypeError(f'expected Command or Group not {command.__class__.__name__}') - - if isinstance(command, Group) and self.parent is not None: - # In a tree like so: - # - # - # - # this needs to be forbidden - raise ValueError(f'{command.name!r} is too nested, groups can only be nested at most one level') - - if not override and command.name in self._children: - raise CommandAlreadyRegistered(command.name, guild_id=None) - - self._children[command.name] = command - command.parent = self - if len(self._children) > 25: - raise ValueError('maximum number of child commands exceeded') - - def remove_command(self, name: str, /) -> Optional[Union[Command[Any, ..., Any], Group]]: - """Removes a command or group from the internal list of commands. - - Parameters - ----------- - name: :class:`str` - The name of the command or group to remove. - - Returns - -------- - Optional[Union[:class:`~discord.app_commands.Command`, :class:`~discord.app_commands.Group`]] - The command that was removed. If nothing was removed - then ``None`` is returned instead. - """ - - self._children.pop(name, None) - - def get_command(self, name: str, /) -> Optional[Union[Command[Any, ..., Any], Group]]: - """Retrieves a command or group from its name. - - Parameters - ----------- - name: :class:`str` - The name of the command or group to retrieve. - - Returns - -------- - Optional[Union[:class:`~discord.app_commands.Command`, :class:`~discord.app_commands.Group`]] - The command or group that was retrieved. If nothing was found - then ``None`` is returned instead. - """ - return self._children.get(name) - - def command( - self, - *, - name: Union[str, locale_str] = MISSING, - description: Union[str, locale_str] = MISSING, - nsfw: bool = False, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, - ) -> Callable[[CommandCallback[GroupT, P, T]], Command[GroupT, P, T]]: - """A decorator that creates an application command from a regular function under this group. - - Parameters - ------------ - name: Union[:class:`str`, :class:`locale_str`] - The name of the application command. If not given, it defaults to a lower-case - version of the callback name. 
- description: Union[:class:`str`, :class:`locale_str`] - The description of the application command. This shows up in the UI to describe - the application command. If not given, it defaults to the first line of the docstring - of the callback shortened to 100 characters. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - """ - - def decorator(func: CommandCallback[GroupT, P, T]) -> Command[GroupT, P, T]: - if not inspect.iscoroutinefunction(func): - raise TypeError('command function must be a coroutine function') - - if description is MISSING: - if func.__doc__ is None: - desc = '…' - else: - desc = _shorten(func.__doc__) - else: - desc = description - - command = Command( - name=name if name is not MISSING else func.__name__, - description=desc, - callback=func, - nsfw=nsfw, - parent=self, - auto_locale_strings=auto_locale_strings, - extras=extras, - ) - self.add_command(command) - return command - - return decorator - - -def command( - *, - name: Union[str, locale_str] = MISSING, - description: Union[str, locale_str] = MISSING, - nsfw: bool = False, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, -) -> Callable[[CommandCallback[GroupT, P, T]], Command[GroupT, P, T]]: - """Creates an application command from a regular function. - - Parameters - ------------ - name: :class:`str` - The name of the application command. If not given, it defaults to a lower-case - version of the callback name. - description: :class:`str` - The description of the application command. This shows up in the UI to describe - the application command. If not given, it defaults to the first line of the docstring - of the callback shortened to 100 characters. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. 
- """ - - def decorator(func: CommandCallback[GroupT, P, T]) -> Command[GroupT, P, T]: - if not inspect.iscoroutinefunction(func): - raise TypeError('command function must be a coroutine function') - - if description is MISSING: - if func.__doc__ is None: - desc = '…' - else: - desc = _shorten(func.__doc__) - else: - desc = description - - return Command( - name=name if name is not MISSING else func.__name__, - description=desc, - callback=func, - parent=None, - nsfw=nsfw, - auto_locale_strings=auto_locale_strings, - extras=extras, - ) - - return decorator - - -def context_menu( - *, - name: Union[str, locale_str] = MISSING, - nsfw: bool = False, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, -) -> Callable[[ContextMenuCallback], ContextMenu]: - """Creates an application command context menu from a regular function. - - This function must have a signature of :class:`~discord.Interaction` as its first parameter - and taking either a :class:`~discord.Member`, :class:`~discord.User`, or :class:`~discord.Message`, - or a :obj:`typing.Union` of ``Member`` and ``User`` as its second parameter. - - Examples - --------- - - .. code-block:: python3 - - @app_commands.context_menu() - async def react(interaction: discord.Interaction, message: discord.Message): - await interaction.response.send_message('Very cool message!', ephemeral=True) - - @app_commands.context_menu() - async def ban(interaction: discord.Interaction, user: discord.Member): - await interaction.response.send_message(f'Should I actually ban {user}...', ephemeral=True) - - Parameters - ------------ - name: Union[:class:`str`, :class:`locale_str`] - The name of the context menu command. If not given, it defaults to a title-case - version of the callback name. Note that unlike regular slash commands this can - have spaces and upper case characters in the name. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. - """ - - def decorator(func: ContextMenuCallback) -> ContextMenu: - if not inspect.iscoroutinefunction(func): - raise TypeError('context menu function must be a coroutine function') - - actual_name = func.__name__.title() if name is MISSING else name - return ContextMenu( - name=actual_name, - nsfw=nsfw, - callback=func, - auto_locale_strings=auto_locale_strings, - extras=extras, - ) - - return decorator - - -def describe(**parameters: Union[str, locale_str]) -> Callable[[T], T]: - r'''Describes the given parameters by their name using the key of the keyword argument - as the name. - - Example: - - .. code-block:: python3 - - @app_commands.command(description='Bans a member') - @app_commands.describe(member='the member to ban') - async def ban(interaction: discord.Interaction, member: discord.Member): - await interaction.response.send_message(f'Banned {member}') - - Alternatively, you can describe parameters using Google, Sphinx, or Numpy style docstrings. - - Example: - - .. 
code-block:: python3 - - @app_commands.command() - async def ban(interaction: discord.Interaction, member: discord.Member): - """Bans a member - - Parameters - ----------- - member: discord.Member - the member to ban - """ - await interaction.response.send_message(f'Banned {member}') - - Parameters - ----------- - \*\*parameters: Union[:class:`str`, :class:`locale_str`] - The description of the parameters. - - Raises - -------- - TypeError - The parameter name is not found. - ''' - - def decorator(inner: T) -> T: - if isinstance(inner, Command): - _populate_descriptions(inner._params, parameters) - else: - try: - inner.__discord_app_commands_param_description__.update(parameters) # type: ignore # Runtime attribute access - except AttributeError: - inner.__discord_app_commands_param_description__ = parameters # type: ignore # Runtime attribute assignment - - return inner - - return decorator - - -def rename(**parameters: Union[str, locale_str]) -> Callable[[T], T]: - r"""Renames the given parameters by their name using the key of the keyword argument - as the name. - - This renames the parameter within the Discord UI. When referring to the parameter in other - decorators, the parameter name used in the function is used instead of the renamed one. - - Example: - - .. code-block:: python3 - - @app_commands.command() - @app_commands.rename(the_member_to_ban='member') - async def ban(interaction: discord.Interaction, the_member_to_ban: discord.Member): - await interaction.response.send_message(f'Banned {the_member_to_ban}') - - Parameters - ----------- - \*\*parameters: Union[:class:`str`, :class:`locale_str`] - The name of the parameters. - - Raises - -------- - ValueError - The parameter name is already used by another parameter. - TypeError - The parameter name is not found. - """ - - def decorator(inner: T) -> T: - if isinstance(inner, Command): - _populate_renames(inner._params, parameters) - else: - try: - inner.__discord_app_commands_param_rename__.update(parameters) # type: ignore # Runtime attribute access - except AttributeError: - inner.__discord_app_commands_param_rename__ = parameters # type: ignore # Runtime attribute assignment - - return inner - - return decorator - - -def choices(**parameters: List[Choice[ChoiceT]]) -> Callable[[T], T]: - r"""Instructs the given parameters by their name to use the given choices for their choices. - - Example: - - .. code-block:: python3 - - @app_commands.command() - @app_commands.describe(fruits='fruits to choose from') - @app_commands.choices(fruits=[ - Choice(name='apple', value=1), - Choice(name='banana', value=2), - Choice(name='cherry', value=3), - ]) - async def fruit(interaction: discord.Interaction, fruits: Choice[int]): - await interaction.response.send_message(f'Your favourite fruit is {fruits.name}.') - - .. note:: - - This is not the only way to provide choices to a command. There are two more ergonomic ways - of doing this. The first one is to use a :obj:`typing.Literal` annotation: - - .. code-block:: python3 - - @app_commands.command() - @app_commands.describe(fruits='fruits to choose from') - async def fruit(interaction: discord.Interaction, fruits: Literal['apple', 'banana', 'cherry']): - await interaction.response.send_message(f'Your favourite fruit is {fruits}.') - - The second way is to use an :class:`enum.Enum`: - - .. 
code-block:: python3 - - class Fruits(enum.Enum): - apple = 1 - banana = 2 - cherry = 3 - - @app_commands.command() - @app_commands.describe(fruits='fruits to choose from') - async def fruit(interaction: discord.Interaction, fruits: Fruits): - await interaction.response.send_message(f'Your favourite fruit is {fruits}.') - - - Parameters - ----------- - \*\*parameters - The choices of the parameters. - - Raises - -------- - TypeError - The parameter name is not found or the parameter type was incorrect. - """ - - def decorator(inner: T) -> T: - if isinstance(inner, Command): - _populate_choices(inner._params, parameters) - else: - try: - inner.__discord_app_commands_param_choices__.update(parameters) # type: ignore # Runtime attribute access - except AttributeError: - inner.__discord_app_commands_param_choices__ = parameters # type: ignore # Runtime attribute assignment - - return inner - - return decorator - - -def autocomplete(**parameters: AutocompleteCallback[GroupT, ChoiceT]) -> Callable[[T], T]: - r"""Associates the given parameters with the given autocomplete callback. - - Autocomplete is only supported on types that have :class:`str`, :class:`int`, or :class:`float` - values. - - :func:`Checks ` are supported, however they must be attached to the autocomplete - callback in order to work. Checks attached to the command are ignored when invoking the autocomplete - callback. - - For more information, see the :meth:`Command.autocomplete` documentation. - - .. warning:: - The choices returned from this coroutine are suggestions. The user may ignore them and input their own value. - - Example: - - .. code-block:: python3 - - async def fruit_autocomplete( - interaction: discord.Interaction, - current: str, - ) -> List[app_commands.Choice[str]]: - fruits = ['Banana', 'Pineapple', 'Apple', 'Watermelon', 'Melon', 'Cherry'] - return [ - app_commands.Choice(name=fruit, value=fruit) - for fruit in fruits if current.lower() in fruit.lower() - ] - - @app_commands.command() - @app_commands.autocomplete(fruit=fruit_autocomplete) - async def fruits(interaction: discord.Interaction, fruit: str): - await interaction.response.send_message(f'Your favourite fruit seems to be {fruit}') - - Parameters - ----------- - \*\*parameters - The parameters to mark as autocomplete. - - Raises - -------- - TypeError - The parameter name is not found or the parameter type was incorrect. - """ - - def decorator(inner: T) -> T: - if isinstance(inner, Command): - _populate_autocomplete(inner._params, parameters) - else: - try: - inner.__discord_app_commands_param_autocomplete__.update(parameters) # type: ignore # Runtime attribute access - except AttributeError: - inner.__discord_app_commands_param_autocomplete__ = parameters # type: ignore # Runtime attribute assignment - - return inner - - return decorator - - -def guilds(*guild_ids: Union[Snowflake, int]) -> Callable[[T], T]: - r"""Associates the given guilds with the command. - - When the command instance is added to a :class:`CommandTree`, the guilds that are - specified by this decorator become the default guilds that it's added to rather - than being a global command. - - .. note:: - - Due to an implementation quirk and Python limitation, if this is used in conjunction - with the :meth:`CommandTree.command` or :meth:`CommandTree.context_menu` decorator - then this must go below that decorator. - - Example: - - .. code-block:: python3 - - MY_GUILD_ID = discord.Object(...) 
# Guild ID here - - @app_commands.command() - @app_commands.guilds(MY_GUILD_ID) - async def bonk(interaction: discord.Interaction): - await interaction.response.send_message('Bonk', ephemeral=True) - - Parameters - ----------- - \*guild_ids: Union[:class:`int`, :class:`~discord.abc.Snowflake`] - The guilds to associate this command with. The command tree will - use this as the default when added rather than adding it as a global - command. - """ - - defaults: List[int] = [g if isinstance(g, int) else g.id for g in guild_ids] - - def decorator(inner: T) -> T: - if isinstance(inner, (Group, ContextMenu)): - inner._guild_ids = defaults - elif isinstance(inner, Command): - if inner.parent is not None: - raise ValueError('child commands of a group cannot have default guilds set') - - inner._guild_ids = defaults - else: - # Runtime attribute assignment - inner.__discord_app_commands_default_guilds__ = defaults # type: ignore - - return inner - - return decorator - - -def check(predicate: Check) -> Callable[[T], T]: - r"""A decorator that adds a check to an application command. - - These checks should be predicates that take in a single parameter taking - a :class:`~discord.Interaction`. If the check returns a ``False``\-like value then - during invocation a :exc:`CheckFailure` exception is raised and sent to - the appropriate error handlers. - - These checks can be either a coroutine or not. - - Examples - --------- - - Creating a basic check to see if the command invoker is you. - - .. code-block:: python3 - - def check_if_it_is_me(interaction: discord.Interaction) -> bool: - return interaction.user.id == 85309593344815104 - - @tree.command() - @app_commands.check(check_if_it_is_me) - async def only_for_me(interaction: discord.Interaction): - await interaction.response.send_message('I know you!', ephemeral=True) - - Transforming common checks into its own decorator: - - .. code-block:: python3 - - def is_me(): - def predicate(interaction: discord.Interaction) -> bool: - return interaction.user.id == 85309593344815104 - return app_commands.check(predicate) - - @tree.command() - @is_me() - async def only_me(interaction: discord.Interaction): - await interaction.response.send_message('Only you!') - - Parameters - ----------- - predicate: Callable[[:class:`~discord.Interaction`], :class:`bool`] - The predicate to check if the command should be invoked. - """ - - def decorator(func: CheckInputParameter) -> CheckInputParameter: - if isinstance(func, (Command, ContextMenu)): - func.checks.append(predicate) - else: - if not hasattr(func, '__discord_app_commands_checks__'): - func.__discord_app_commands_checks__ = [] - - func.__discord_app_commands_checks__.append(predicate) - - return func - - return decorator # type: ignore - - -@overload -def guild_only(func: None = ...) -> Callable[[T], T]: - ... - - -@overload -def guild_only(func: T) -> T: - ... - - -def guild_only(func: Optional[T] = None) -> Union[T, Callable[[T], T]]: - """A decorator that indicates this command can only be used in a guild context. - - This is **not** implemented as a :func:`check`, and is instead verified by Discord server side. - Therefore, there is no error handler called when a command is used within a private message. - - This decorator can be called with or without parentheses. - - Due to a Discord limitation, this decorator does nothing in subcommands and is ignored. - - Examples - --------- - - .. 
code-block:: python3 - - @app_commands.command() - @app_commands.guild_only() - async def my_guild_only_command(interaction: discord.Interaction) -> None: - await interaction.response.send_message('I am only available in guilds!') - """ - - def inner(f: T) -> T: - if isinstance(f, (Command, Group, ContextMenu)): - f.guild_only = True - else: - f.__discord_app_commands_guild_only__ = True # type: ignore # Runtime attribute assignment - return f - - # Check if called with parentheses or not - if func is None: - # Called with parentheses - return inner - else: - return inner(func) - - -def default_permissions(**perms: bool) -> Callable[[T], T]: - r"""A decorator that sets the default permissions needed to execute this command. - - When this decorator is used, by default users must have these permissions to execute the command. - However, an administrator can change the permissions needed to execute this command using the official - client. Therefore, this only serves as a hint. - - Setting an empty permissions field, including via calling this with no arguments, will disallow anyone - except server administrators from using the command in a guild. - - This is sent to Discord server side, and is not a :func:`check`. Therefore, error handlers are not called. - - Due to a Discord limitation, this decorator does nothing in subcommands and is ignored. - - .. warning:: - - This serves as a *hint* and members are *not* required to have the permissions given to actually - execute this command. If you want to ensure that members have the permissions needed, consider using - :func:`~discord.app_commands.checks.has_permissions` instead. - - Parameters - ----------- - \*\*perms: :class:`bool` - Keyword arguments denoting the permissions to set as the default. - - Example - --------- - - .. code-block:: python3 - - @app_commands.command() - @app_commands.default_permissions(manage_messages=True) - async def test(interaction: discord.Interaction): - await interaction.response.send_message('You may or may not have manage messages.') - """ - - permissions = Permissions(**perms) - - def decorator(func: T) -> T: - if isinstance(func, (Command, Group, ContextMenu)): - func.default_permissions = permissions - else: - func.__discord_app_commands_default_permissions__ = permissions # type: ignore # Runtime attribute assignment - - return func - - return decorator diff --git a/.venv/Lib/site-packages/discord/app_commands/errors.py b/.venv/Lib/site-packages/discord/app_commands/errors.py deleted file mode 100644 index 3cc12c7..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/errors.py +++ /dev/null @@ -1,537 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import Any, TYPE_CHECKING, List, Optional, Sequence, Union - -from ..enums import AppCommandOptionType, AppCommandType, Locale -from ..errors import DiscordException, HTTPException, _flatten_error_dict - -__all__ = ( - 'AppCommandError', - 'CommandInvokeError', - 'TransformerError', - 'TranslationError', - 'CheckFailure', - 'CommandAlreadyRegistered', - 'CommandSignatureMismatch', - 'CommandNotFound', - 'CommandLimitReached', - 'NoPrivateMessage', - 'MissingRole', - 'MissingAnyRole', - 'MissingPermissions', - 'BotMissingPermissions', - 'CommandOnCooldown', - 'MissingApplicationID', - 'CommandSyncFailure', -) - -if TYPE_CHECKING: - from .commands import Command, Group, ContextMenu, Parameter - from .transformers import Transformer - from .translator import TranslationContextTypes, locale_str - from ..types.snowflake import Snowflake, SnowflakeList - from .checks import Cooldown - - CommandTypes = Union[Command[Any, ..., Any], Group, ContextMenu] - -APP_ID_NOT_FOUND = ( - 'Client does not have an application_id set. Either the function was called before on_ready ' - 'was called or application_id was not passed to the Client constructor.' -) - - -class AppCommandError(DiscordException): - """The base exception type for all application command related errors. - - This inherits from :exc:`discord.DiscordException`. - - This exception and exceptions inherited from it are handled - in a special way as they are caught and passed into various error handlers - in this order: - - - :meth:`Command.error ` - - :meth:`Group.on_error ` - - :meth:`CommandTree.on_error ` - - .. versionadded:: 2.0 - """ - - pass - - -class CommandInvokeError(AppCommandError): - """An exception raised when the command being invoked raised an exception. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. versionadded:: 2.0 - - Attributes - ----------- - original: :exc:`Exception` - The original exception that was raised. You can also get this via - the ``__cause__`` attribute. - command: Union[:class:`Command`, :class:`ContextMenu`] - The command that failed. - """ - - def __init__(self, command: Union[Command[Any, ..., Any], ContextMenu], e: Exception) -> None: - self.original: Exception = e - self.command: Union[Command[Any, ..., Any], ContextMenu] = command - super().__init__(f'Command {command.name!r} raised an exception: {e.__class__.__name__}: {e}') - - -class TransformerError(AppCommandError): - """An exception raised when a :class:`Transformer` or type annotation fails to - convert to its target type. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - If an exception occurs while converting that does not subclass - :exc:`AppCommandError` then the exception is wrapped into this exception. - The original exception can be retrieved using the ``__cause__`` attribute. - Otherwise if the exception derives from :exc:`AppCommandError` then it will - be propagated as-is. - - .. versionadded:: 2.0 - - Attributes - ----------- - value: Any - The value that failed to convert. - type: :class:`~discord.AppCommandOptionType` - The type of argument that failed to convert. - transformer: :class:`Transformer` - The transformer that failed the conversion. 
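# --- Illustrative sketch, not part of the deleted file above ---
# The error types documented here (AppCommandError, CommandInvokeError,
# TransformerError) are dispatched to Command.error, then Group.on_error,
# then CommandTree.on_error. A minimal tree-level handler might look like
# this; `tree` is an assumed, pre-existing app_commands.CommandTree.
import discord
from discord import app_commands

@tree.error
async def on_app_command_error(interaction: discord.Interaction, error: app_commands.AppCommandError) -> None:
    if isinstance(error, app_commands.TransformerError):
        # The offending input is kept on the exception; the root cause is in __cause__.
        await interaction.response.send_message(f'Could not convert {error.value!r}.', ephemeral=True)
    elif isinstance(error, app_commands.CommandInvokeError):
        # The command callback itself raised; the original exception is preserved.
        raise error.original
    else:
        await interaction.response.send_message('Something went wrong.', ephemeral=True)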
- """ - - def __init__(self, value: Any, opt_type: AppCommandOptionType, transformer: Transformer): - self.value: Any = value - self.type: AppCommandOptionType = opt_type - self.transformer: Transformer = transformer - - super().__init__(f'Failed to convert {value} to {transformer._error_display_name!s}') - - -class TranslationError(AppCommandError): - """An exception raised when the library fails to translate a string. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - If an exception occurs while calling :meth:`Translator.translate` that does - not subclass this then the exception is wrapped into this exception. - The original exception can be retrieved using the ``__cause__`` attribute. - Otherwise it will be propagated as-is. - - .. versionadded:: 2.0 - - Attributes - ----------- - string: Optional[Union[:class:`str`, :class:`locale_str`]] - The string that caused the error, if any. - locale: Optional[:class:`~discord.Locale`] - The locale that caused the error, if any. - context: :class:`~discord.app_commands.TranslationContext` - The context of the translation that triggered the error. - """ - - def __init__( - self, - *msg: str, - string: Optional[Union[str, locale_str]] = None, - locale: Optional[Locale] = None, - context: TranslationContextTypes, - ) -> None: - self.string: Optional[Union[str, locale_str]] = string - self.locale: Optional[Locale] = locale - self.context: TranslationContextTypes = context - - if msg: - super().__init__(*msg) - else: - ctx = context.location.name.replace('_', ' ') - fmt = f'Failed to translate {self.string!r} in a {ctx}' - if self.locale is not None: - fmt = f'{fmt} in the {self.locale.value} locale' - - super().__init__(fmt) - - -class CheckFailure(AppCommandError): - """An exception raised when check predicates in a command have failed. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. versionadded:: 2.0 - """ - - pass - - -class NoPrivateMessage(CheckFailure): - """An exception raised when a command does not work in a direct message. - - This inherits from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - """ - - def __init__(self, message: Optional[str] = None) -> None: - super().__init__(message or 'This command cannot be used in direct messages.') - - -class MissingRole(CheckFailure): - """An exception raised when the command invoker lacks a role to run a command. - - This inherits from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - Attributes - ----------- - missing_role: Union[:class:`str`, :class:`int`] - The required role that is missing. - This is the parameter passed to :func:`~discord.app_commands.checks.has_role`. - """ - - def __init__(self, missing_role: Snowflake) -> None: - self.missing_role: Snowflake = missing_role - message = f'Role {missing_role!r} is required to run this command.' - super().__init__(message) - - -class MissingAnyRole(CheckFailure): - """An exception raised when the command invoker lacks any of the roles - specified to run a command. - - This inherits from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - Attributes - ----------- - missing_roles: List[Union[:class:`str`, :class:`int`]] - The roles that the invoker is missing. - These are the parameters passed to :func:`~discord.app_commands.checks.has_any_role`. 
- """ - - def __init__(self, missing_roles: SnowflakeList) -> None: - self.missing_roles: SnowflakeList = missing_roles - - missing = [f"'{role}'" for role in missing_roles] - - if len(missing) > 2: - fmt = '{}, or {}'.format(', '.join(missing[:-1]), missing[-1]) - else: - fmt = ' or '.join(missing) - - message = f'You are missing at least one of the required roles: {fmt}' - super().__init__(message) - - -class MissingPermissions(CheckFailure): - """An exception raised when the command invoker lacks permissions to run a - command. - - This inherits from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - Attributes - ----------- - missing_permissions: List[:class:`str`] - The required permissions that are missing. - """ - - def __init__(self, missing_permissions: List[str], *args: Any) -> None: - self.missing_permissions: List[str] = missing_permissions - - missing = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in missing_permissions] - - if len(missing) > 2: - fmt = '{}, and {}'.format(", ".join(missing[:-1]), missing[-1]) - else: - fmt = ' and '.join(missing) - message = f'You are missing {fmt} permission(s) to run this command.' - super().__init__(message, *args) - - -class BotMissingPermissions(CheckFailure): - """An exception raised when the bot's member lacks permissions to run a - command. - - This inherits from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - Attributes - ----------- - missing_permissions: List[:class:`str`] - The required permissions that are missing. - """ - - def __init__(self, missing_permissions: List[str], *args: Any) -> None: - self.missing_permissions: List[str] = missing_permissions - - missing = [perm.replace('_', ' ').replace('guild', 'server').title() for perm in missing_permissions] - - if len(missing) > 2: - fmt = '{}, and {}'.format(", ".join(missing[:-1]), missing[-1]) - else: - fmt = ' and '.join(missing) - message = f'Bot requires {fmt} permission(s) to run this command.' - super().__init__(message, *args) - - -class CommandOnCooldown(CheckFailure): - """An exception raised when the command being invoked is on cooldown. - - This inherits from :exc:`~discord.app_commands.CheckFailure`. - - .. versionadded:: 2.0 - - Attributes - ----------- - cooldown: :class:`~discord.app_commands.Cooldown` - The cooldown that was triggered. - retry_after: :class:`float` - The amount of seconds to wait before you can retry again. - """ - - def __init__(self, cooldown: Cooldown, retry_after: float) -> None: - self.cooldown: Cooldown = cooldown - self.retry_after: float = retry_after - super().__init__(f'You are on cooldown. Try again in {retry_after:.2f}s') - - -class CommandAlreadyRegistered(AppCommandError): - """An exception raised when a command is already registered. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. versionadded:: 2.0 - - Attributes - ----------- - name: :class:`str` - The name of the command already registered. - guild_id: Optional[:class:`int`] - The guild ID this command was already registered at. - If ``None`` then it was a global command. - """ - - def __init__(self, name: str, guild_id: Optional[int]): - self.name: str = name - self.guild_id: Optional[int] = guild_id - super().__init__(f'Command {name!r} already registered.') - - -class CommandNotFound(AppCommandError): - """An exception raised when an application command could not be found. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. 
versionadded:: 2.0 - - Attributes - ------------ - name: :class:`str` - The name of the application command not found. - parents: List[:class:`str`] - A list of parent command names that were previously found - prior to the application command not being found. - type: :class:`~discord.AppCommandType` - The type of command that was not found. - """ - - def __init__(self, name: str, parents: List[str], type: AppCommandType = AppCommandType.chat_input): - self.name: str = name - self.parents: List[str] = parents - self.type: AppCommandType = type - super().__init__(f'Application command {name!r} not found') - - -class CommandLimitReached(AppCommandError): - """An exception raised when the maximum number of application commands was reached - either globally or in a guild. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. versionadded:: 2.0 - - Attributes - ------------ - type: :class:`~discord.AppCommandType` - The type of command that reached the limit. - guild_id: Optional[:class:`int`] - The guild ID that reached the limit or ``None`` if it was global. - limit: :class:`int` - The limit that was hit. - """ - - def __init__(self, guild_id: Optional[int], limit: int, type: AppCommandType = AppCommandType.chat_input): - self.guild_id: Optional[int] = guild_id - self.limit: int = limit - self.type: AppCommandType = type - - lookup = { - AppCommandType.chat_input: 'slash commands', - AppCommandType.message: 'message context menu commands', - AppCommandType.user: 'user context menu commands', - } - desc = lookup.get(type, 'application commands') - ns = 'globally' if self.guild_id is None else f'for guild ID {self.guild_id}' - super().__init__(f'maximum number of {desc} exceeded {limit} {ns}') - - -class CommandSignatureMismatch(AppCommandError): - """An exception raised when an application command from Discord has a different signature - from the one provided in the code. This happens because your command definition differs - from the command definition you provided Discord. Either your code is out of date or the - data from Discord is out of sync. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. versionadded:: 2.0 - - Attributes - ------------ - command: Union[:class:`~.app_commands.Command`, :class:`~.app_commands.ContextMenu`, :class:`~.app_commands.Group`] - The command that had the signature mismatch. - """ - - def __init__(self, command: Union[Command[Any, ..., Any], ContextMenu, Group]): - self.command: Union[Command[Any, ..., Any], ContextMenu, Group] = command - msg = ( - f'The signature for command {command.name!r} is different from the one provided by Discord. ' - 'This can happen because either your code is out of date or you have not synced the ' - 'commands with Discord, causing the mismatch in data. It is recommended to sync the ' - 'command tree to fix this issue.' - ) - super().__init__(msg) - - -class MissingApplicationID(AppCommandError): - """An exception raised when the client does not have an application ID set. - An application ID is required for syncing application commands. - - This inherits from :exc:`~discord.app_commands.AppCommandError`. - - .. 
versionadded:: 2.0 - """ - - def __init__(self, message: Optional[str] = None): - super().__init__(message or APP_ID_NOT_FOUND) - - -def _get_command_error( - index: str, - inner: Any, - objects: Sequence[Union[Parameter, CommandTypes]], - messages: List[str], - indent: int = 0, -) -> None: - # Import these here to avoid circular imports - from .commands import Command, Group, ContextMenu - - indentation = ' ' * indent - - # Top level errors are: - # : { : } - # The dicts could be nested, e.g. - # : { : { : } } - # Luckily, this is already handled by the flatten_error_dict utility - if not index.isdigit(): - errors = _flatten_error_dict(inner, index) - messages.extend(f'In {k}: {v}' for k, v in errors.items()) - return - - idx = int(index) - try: - obj = objects[idx] - except IndexError: - dedent_one_level = ' ' * (indent - 2) - errors = _flatten_error_dict(inner, index) - messages.extend(f'{dedent_one_level}In {k}: {v}' for k, v in errors.items()) - return - - children: Sequence[Union[Parameter, CommandTypes]] = [] - if isinstance(obj, Command): - messages.append(f'{indentation}In command {obj.qualified_name!r} defined in function {obj.callback.__qualname__!r}') - children = obj.parameters - elif isinstance(obj, Group): - messages.append(f'{indentation}In group {obj.qualified_name!r} defined in module {obj.module!r}') - children = obj.commands - elif isinstance(obj, ContextMenu): - messages.append( - f'{indentation}In context menu {obj.qualified_name!r} defined in function {obj.callback.__qualname__!r}' - ) - else: - messages.append(f'{indentation}In parameter {obj.name!r}') - - for key, remaining in inner.items(): - # Special case the 'options' key since they have well defined meanings - if key == 'options': - for index, d in remaining.items(): - _get_command_error(index, d, children, messages, indent=indent + 2) - else: - if isinstance(remaining, dict): - try: - inner_errors = remaining['_errors'] - except KeyError: - errors = _flatten_error_dict(remaining, key=key) - else: - errors = {key: ' '.join(x.get('message', '') for x in inner_errors)} - else: - errors = _flatten_error_dict(remaining, key=key) - - messages.extend(f'{indentation} {k}: {v}' for k, v in errors.items()) - - -class CommandSyncFailure(AppCommandError, HTTPException): - """An exception raised when :meth:`CommandTree.sync` failed. - - This provides syncing failures in a slightly more readable format. - - This inherits from :exc:`~discord.app_commands.AppCommandError` - and :exc:`~discord.HTTPException`. - - .. versionadded:: 2.0 - """ - - def __init__(self, child: HTTPException, commands: List[CommandTypes]) -> None: - # Consume the child exception and make it seem as if we are that exception - self.__dict__.update(child.__dict__) - - messages = [f'Failed to upload commands to Discord (HTTP status {self.status}, error code {self.code})'] - - if self._errors: - for index, inner in self._errors.items(): - _get_command_error(index, inner, commands, messages) - - # Equivalent to super().__init__(...) 
but skips other constructors - self.args = ('\n'.join(messages),) diff --git a/.venv/Lib/site-packages/discord/app_commands/models.py b/.venv/Lib/site-packages/discord/app_commands/models.py deleted file mode 100644 index 3e9d250..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/models.py +++ /dev/null @@ -1,1091 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -from datetime import datetime - -from .errors import MissingApplicationID -from .translator import TranslationContextLocation, TranslationContext, locale_str, Translator -from ..permissions import Permissions -from ..enums import AppCommandOptionType, AppCommandType, AppCommandPermissionType, ChannelType, Locale, try_enum -from ..mixins import Hashable -from ..utils import _get_as_snowflake, parse_time, snowflake_time, MISSING -from ..object import Object -from ..role import Role -from ..member import Member - -from typing import Any, Dict, Generic, List, TYPE_CHECKING, Optional, TypeVar, Union - -__all__ = ( - 'AppCommand', - 'AppCommandGroup', - 'AppCommandChannel', - 'AppCommandThread', - 'AppCommandPermissions', - 'GuildAppCommandPermissions', - 'Argument', - 'Choice', - 'AllChannels', -) - -ChoiceT = TypeVar('ChoiceT', str, int, float, Union[str, int, float]) - - -def is_app_command_argument_type(value: int) -> bool: - return 11 >= value >= 3 - - -if TYPE_CHECKING: - from ..types.command import ( - ApplicationCommand as ApplicationCommandPayload, - ApplicationCommandOption, - ApplicationCommandOptionChoice, - ApplicationCommandPermissions, - GuildApplicationCommandPermissions, - ) - from ..types.interactions import ( - PartialChannel, - PartialThread, - ) - from ..types.threads import ( - ThreadMetadata, - ThreadArchiveDuration, - ) - - from ..abc import Snowflake - from ..state import ConnectionState - from ..guild import GuildChannel, Guild - from ..channel import TextChannel - from ..threads import Thread - from ..user import User - - ApplicationCommandParent = Union['AppCommand', 'AppCommandGroup'] - - -class AllChannels: - """Represents all channels for application command permissions. - - .. versionadded:: 2.0 - - Attributes - ----------- - guild: :class:`~discord.Guild` - The guild the application command permission is for. - """ - - __slots__ = ('guild',) - - def __init__(self, guild: Guild): - self.guild: Guild = guild - - @property - def id(self) -> int: - """:class:`int`: The ID sentinel used to represent all channels. 
Equivalent to the guild's ID minus 1.""" - return self.guild.id - 1 - - def __repr__(self) -> str: - return f'' - - -def _to_locale_dict(data: Dict[str, str]) -> Dict[Locale, str]: - return {try_enum(Locale, key): value for key, value in data.items()} - - -class AppCommand(Hashable): - """Represents an application command. - - In common parlance this is referred to as a "Slash Command" or a - "Context Menu Command". - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two application commands are equal. - - .. describe:: x != y - - Checks if two application commands are not equal. - - .. describe:: hash(x) - - Returns the application command's hash. - - .. describe:: str(x) - - Returns the application command's name. - - Attributes - ----------- - id: :class:`int` - The application command's ID. - application_id: :class:`int` - The application command's application's ID. - type: :class:`~discord.AppCommandType` - The application command's type. - name: :class:`str` - The application command's name. - description: :class:`str` - The application command's description. - name_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised names of the application command. Used for display purposes. - description_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised descriptions of the application command. Used for display purposes. - options: List[Union[:class:`Argument`, :class:`AppCommandGroup`]] - A list of options. - default_member_permissions: Optional[:class:`~discord.Permissions`] - The default member permissions that can run this command. - dm_permission: :class:`bool` - A boolean that indicates whether this command can be run in direct messages. - guild_id: Optional[:class:`int`] - The ID of the guild this command is registered in. A value of ``None`` - denotes that it is a global command. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. 
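# --- Illustrative sketch, not part of the deleted file above ---
# AppCommand objects with the attributes listed here are what the command
# tree returns once commands are synced. `tree` is an assumed, pre-existing
# app_commands.CommandTree; CommandSyncFailure (documented earlier in
# errors.py) reports sync problems in a readable form.
from discord import app_commands

async def sync_and_list() -> None:
    try:
        await tree.sync()  # upload local definitions globally
    except app_commands.CommandSyncFailure as exc:
        print(exc)  # per-command breakdown of what Discord rejected
        return
    for cmd in await tree.fetch_commands():  # global commands; pass guild=... for guild-specific ones
        print(cmd.id, cmd.name, cmd.type, cmd.mention)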
- """ - - __slots__ = ( - 'id', - 'type', - 'application_id', - 'name', - 'description', - 'name_localizations', - 'description_localizations', - 'guild_id', - 'options', - 'default_member_permissions', - 'dm_permission', - 'nsfw', - '_state', - ) - - def __init__(self, *, data: ApplicationCommandPayload, state: ConnectionState) -> None: - self._state: ConnectionState = state - self._from_data(data) - - def _from_data(self, data: ApplicationCommandPayload) -> None: - self.id: int = int(data['id']) - self.application_id: int = int(data['application_id']) - self.name: str = data['name'] - self.description: str = data['description'] - self.guild_id: Optional[int] = _get_as_snowflake(data, 'guild_id') - self.type: AppCommandType = try_enum(AppCommandType, data.get('type', 1)) - self.options: List[Union[Argument, AppCommandGroup]] = [ - app_command_option_factory(data=d, parent=self, state=self._state) for d in data.get('options', []) - ] - self.default_member_permissions: Optional[Permissions] - permissions = data.get('default_member_permissions') - if permissions is None: - self.default_member_permissions = None - else: - self.default_member_permissions = Permissions(int(permissions)) - - dm_permission = data.get('dm_permission') - # For some reason this field can be explicit null and mean True - if dm_permission is None: - dm_permission = True - - self.dm_permission: bool = dm_permission - self.nsfw: bool = data.get('nsfw', False) - self.name_localizations: Dict[Locale, str] = _to_locale_dict(data.get('name_localizations') or {}) - self.description_localizations: Dict[Locale, str] = _to_locale_dict(data.get('description_localizations') or {}) - - def to_dict(self) -> ApplicationCommandPayload: - return { - 'id': self.id, - 'type': self.type.value, - 'application_id': self.application_id, - 'name': self.name, - 'description': self.description, - 'name_localizations': {str(k): v for k, v in self.name_localizations.items()}, - 'description_localizations': {str(k): v for k, v in self.description_localizations.items()}, - 'options': [opt.to_dict() for opt in self.options], - } # type: ignore # Type checker does not understand this literal. - - def __str__(self) -> str: - return self.name - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} id={self.id!r} name={self.name!r} type={self.type!r}>' - - @property - def mention(self) -> str: - """:class:`str`: Returns a string that allows you to mention the given AppCommand.""" - return f'' - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`~discord.Guild`]: Returns the guild this command is registered to - if it exists. - """ - return self._state._get_guild(self.guild_id) - - async def delete(self) -> None: - """|coro| - - Deletes the application command. - - Raises - ------- - NotFound - The application command was not found. - Forbidden - You do not have permission to delete this application command. - HTTPException - Deleting the application command failed. - MissingApplicationID - The client does not have an application ID. 
- """ - state = self._state - if not state.application_id: - raise MissingApplicationID - - if self.guild_id: - await state.http.delete_guild_command( - state.application_id, - self.guild_id, - self.id, - ) - else: - await state.http.delete_global_command( - state.application_id, - self.id, - ) - - async def edit( - self, - *, - name: str = MISSING, - description: str = MISSING, - default_member_permissions: Optional[Permissions] = MISSING, - dm_permission: bool = MISSING, - options: List[Union[Argument, AppCommandGroup]] = MISSING, - ) -> AppCommand: - """|coro| - - Edits the application command. - - Parameters - ----------- - name: :class:`str` - The new name for the application command. - description: :class:`str` - The new description for the application command. - default_member_permissions: Optional[:class:`~discord.Permissions`] - The new default permissions needed to use this application command. - Pass value of ``None`` to remove any permission requirements. - dm_permission: :class:`bool` - Indicates if the application command can be used in DMs. - options: List[Union[:class:`Argument`, :class:`AppCommandGroup`]] - List of new options for this application command. - - Raises - ------- - NotFound - The application command was not found. - Forbidden - You do not have permission to edit this application command. - HTTPException - Editing the application command failed. - MissingApplicationID - The client does not have an application ID. - - Returns - -------- - :class:`AppCommand` - The newly edited application command. - """ - state = self._state - if not state.application_id: - raise MissingApplicationID - - payload = {} - - if name is not MISSING: - payload['name'] = name - - if description is not MISSING: - payload['description'] = description - - if default_member_permissions is not MISSING: - if default_member_permissions is not None: - payload['default_member_permissions'] = default_member_permissions.value - else: - payload['default_member_permissions'] = None - - if self.guild_id is None and dm_permission is not MISSING: - payload['dm_permission'] = dm_permission - - if options is not MISSING: - payload['options'] = [option.to_dict() for option in options] - - if not payload: - return self - - if self.guild_id: - data = await state.http.edit_guild_command( - state.application_id, - self.guild_id, - self.id, - payload, - ) - else: - data = await state.http.edit_global_command( - state.application_id, - self.id, - payload, - ) - return AppCommand(data=data, state=state) - - async def fetch_permissions(self, guild: Snowflake) -> GuildAppCommandPermissions: - """|coro| - - Retrieves this command's permission in the guild. - - Parameters - ----------- - guild: :class:`~discord.abc.Snowflake` - The guild to retrieve the permissions from. - - Raises - ------- - Forbidden - You do not have permission to fetch the application command's permissions. - HTTPException - Fetching the application command's permissions failed. - MissingApplicationID - The client does not have an application ID. - NotFound - The application command's permissions could not be found. - This can also indicate that the permissions are synced with the guild - (i.e. they are unchanged from the default). - - Returns - -------- - :class:`GuildAppCommandPermissions` - An object representing the application command's permissions in the guild. 
- """ - state = self._state - if not state.application_id: - raise MissingApplicationID - - data = await state.http.get_application_command_permissions( - state.application_id, - guild.id, - self.id, - ) - return GuildAppCommandPermissions(data=data, state=state, command=self) - - -class Choice(Generic[ChoiceT]): - """Represents an application command argument choice. - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two choices are equal. - - .. describe:: x != y - - Checks if two choices are not equal. - - .. describe:: hash(x) - - Returns the choice's hash. - - Parameters - ----------- - name: Union[:class:`str`, :class:`locale_str`] - The name of the choice. Used for display purposes. - Can only be up to 100 characters. - name_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised names of the choice. Used for display purposes. - value: Union[:class:`int`, :class:`str`, :class:`float`] - The value of the choice. If it's a string, it can only be - up to 100 characters long. - """ - - __slots__ = ('name', 'value', '_locale_name', 'name_localizations') - - def __init__(self, *, name: Union[str, locale_str], value: ChoiceT): - name, locale = (name.message, name) if isinstance(name, locale_str) else (name, None) - self.name: str = name - self._locale_name: Optional[locale_str] = locale - self.value: ChoiceT = value - self.name_localizations: Dict[Locale, str] = {} - - @classmethod - def from_dict(cls, data: ApplicationCommandOptionChoice) -> Choice[ChoiceT]: - self = cls.__new__(cls) - self.name = data['name'] - self.value = data['value'] # type: ignore # This seems to break every other pyright release - self.name_localizations = _to_locale_dict(data.get('name_localizations') or {}) - return self - - def __eq__(self, o: object) -> bool: - return isinstance(o, Choice) and self.name == o.name and self.value == o.value - - def __hash__(self) -> int: - return hash((self.name, self.value)) - - def __repr__(self) -> str: - return f'{self.__class__.__name__}(name={self.name!r}, value={self.value!r})' - - @property - def _option_type(self) -> AppCommandOptionType: - if isinstance(self.value, int): - return AppCommandOptionType.integer - elif isinstance(self.value, float): - return AppCommandOptionType.number - elif isinstance(self.value, str): - return AppCommandOptionType.string - else: - raise TypeError( - f'invalid Choice value type given, expected int, str, or float but received {self.value.__class__.__name__}' - ) - - async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]: - base = self.to_dict() - name_localizations: Dict[str, str] = {} - context = TranslationContext(location=TranslationContextLocation.choice_name, data=self) - if self._locale_name: - for locale in Locale: - translation = await translator._checked_translate(self._locale_name, locale, context) - if translation is not None: - name_localizations[locale.value] = translation - - if name_localizations: - base['name_localizations'] = name_localizations - - return base - - async def get_translated_payload_for_locale(self, translator: Translator, locale: Locale) -> Dict[str, Any]: - base = self.to_dict() - if self._locale_name: - context = TranslationContext(location=TranslationContextLocation.choice_name, data=self) - translation = await translator._checked_translate(self._locale_name, locale, context) - if translation is not None: - base['name'] = translation - - return base - - def to_dict(self) -> Dict[str, Any]: - base = { - 'name': 
self.name, - 'value': self.value, - } - if self.name_localizations: - base['name_localizations'] = {str(k): v for k, v in self.name_localizations.items()} - return base - - -class AppCommandChannel(Hashable): - """Represents an application command partially resolved channel object. - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the channel's hash. - - .. describe:: str(x) - - Returns the channel's name. - - Attributes - ----------- - id: :class:`int` - The ID of the channel. - type: :class:`~discord.ChannelType` - The type of channel. - name: :class:`str` - The name of the channel. - permissions: :class:`~discord.Permissions` - The resolved permissions of the user who invoked - the application command in that channel. - guild_id: :class:`int` - The guild ID this channel belongs to. - """ - - __slots__ = ( - 'id', - 'type', - 'name', - 'permissions', - 'guild_id', - '_state', - ) - - def __init__( - self, - *, - state: ConnectionState, - data: PartialChannel, - guild_id: int, - ): - self._state: ConnectionState = state - self.guild_id: int = guild_id - self.id: int = int(data['id']) - self.type: ChannelType = try_enum(ChannelType, data['type']) - self.name: str = data['name'] - self.permissions: Permissions = Permissions(int(data['permissions'])) - - def __str__(self) -> str: - return self.name - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} id={self.id!r} name={self.name!r} type={self.type!r}>' - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`~discord.Guild`]: The channel's guild, from cache, if found.""" - return self._state._get_guild(self.guild_id) - - def resolve(self) -> Optional[GuildChannel]: - """Resolves the application command channel to the appropriate channel - from cache if found. - - Returns - -------- - Optional[:class:`.abc.GuildChannel`] - The resolved guild channel or ``None`` if not found in cache. - """ - guild = self._state._get_guild(self.guild_id) - if guild is not None: - return guild.get_channel(self.id) - return None - - async def fetch(self) -> GuildChannel: - """|coro| - - Fetches the partial channel to a full :class:`.abc.GuildChannel`. - - Raises - -------- - NotFound - The channel was not found. - Forbidden - You do not have the permissions required to get a channel. - HTTPException - Retrieving the channel failed. - - Returns - -------- - :class:`.abc.GuildChannel` - The full channel. - """ - client = self._state._get_client() - return await client.fetch_channel(self.id) # type: ignore # This is explicit narrowing - - @property - def mention(self) -> str: - """:class:`str`: The string that allows you to mention the channel.""" - return f'<#{self.id}>' - - @property - def created_at(self) -> datetime: - """:class:`datetime.datetime`: An aware timestamp of when this channel was created in UTC.""" - return snowflake_time(self.id) - - -class AppCommandThread(Hashable): - """Represents an application command partially resolved thread object. - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two thread are equal. - - .. describe:: x != y - - Checks if two thread are not equal. - - .. describe:: hash(x) - - Returns the thread's hash. - - .. describe:: str(x) - - Returns the thread's name. - - Attributes - ----------- - id: :class:`int` - The ID of the thread. 
- type: :class:`~discord.ChannelType` - The type of thread. - name: :class:`str` - The name of the thread. - parent_id: :class:`int` - The parent text channel ID this thread belongs to. - permissions: :class:`~discord.Permissions` - The resolved permissions of the user who invoked - the application command in that thread. - guild_id: :class:`int` - The guild ID this thread belongs to. - archived: :class:`bool` - Whether the thread is archived. - locked: :class:`bool` - Whether the thread is locked. - invitable: :class:`bool` - Whether non-moderators can add other non-moderators to this thread. - This is always ``True`` for public threads. - archiver_id: Optional[:class:`int`] - The user's ID that archived this thread. - auto_archive_duration: :class:`int` - The duration in minutes until the thread is automatically hidden from the channel list. - Usually a value of 60, 1440, 4320 and 10080. - archive_timestamp: :class:`datetime.datetime` - An aware timestamp of when the thread's archived status was last updated in UTC. - """ - - __slots__ = ( - 'id', - 'type', - 'name', - 'permissions', - 'guild_id', - 'parent_id', - 'archived', - 'archiver_id', - 'auto_archive_duration', - 'archive_timestamp', - 'locked', - 'invitable', - '_created_at', - '_state', - ) - - def __init__( - self, - *, - state: ConnectionState, - data: PartialThread, - guild_id: int, - ): - self._state: ConnectionState = state - self.guild_id: int = guild_id - self.id: int = int(data['id']) - self.parent_id: int = int(data['parent_id']) - self.type: ChannelType = try_enum(ChannelType, data['type']) - self.name: str = data['name'] - self.permissions: Permissions = Permissions(int(data['permissions'])) - self._unroll_metadata(data['thread_metadata']) - - def __str__(self) -> str: - return self.name - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} id={self.id!r} name={self.name!r} archived={self.archived} type={self.type!r}>' - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`~discord.Guild`]: The channel's guild, from cache, if found.""" - return self._state._get_guild(self.guild_id) - - def _unroll_metadata(self, data: ThreadMetadata) -> None: - self.archived: bool = data['archived'] - self.archiver_id: Optional[int] = _get_as_snowflake(data, 'archiver_id') - self.auto_archive_duration: ThreadArchiveDuration = data['auto_archive_duration'] - self.archive_timestamp: datetime = parse_time(data['archive_timestamp']) - self.locked: bool = data.get('locked', False) - self.invitable: bool = data.get('invitable', True) - self._created_at: Optional[datetime] = parse_time(data.get('create_timestamp')) - - @property - def parent(self) -> Optional[TextChannel]: - """Optional[:class:`~discord.TextChannel`]: The parent channel this thread belongs to.""" - return self.guild.get_channel(self.parent_id) # type: ignore - - @property - def mention(self) -> str: - """:class:`str`: The string that allows you to mention the thread.""" - return f'<#{self.id}>' - - @property - def created_at(self) -> Optional[datetime]: - """An aware timestamp of when the thread was created in UTC. - - .. note:: - - This timestamp only exists for threads created after 9 January 2022, otherwise returns ``None``. - """ - return self._created_at - - def resolve(self) -> Optional[Thread]: - """Resolves the application command channel to the appropriate channel - from cache if found. - - Returns - -------- - Optional[:class:`.abc.GuildChannel`] - The resolved guild channel or ``None`` if not found in cache. 
- """ - guild = self._state._get_guild(self.guild_id) - if guild is not None: - return guild.get_thread(self.id) - return None - - async def fetch(self) -> Thread: - """|coro| - - Fetches the partial channel to a full :class:`~discord.Thread`. - - Raises - -------- - NotFound - The thread was not found. - Forbidden - You do not have the permissions required to get a thread. - HTTPException - Retrieving the thread failed. - - Returns - -------- - :class:`~discord.Thread` - The full thread. - """ - client = self._state._get_client() - return await client.fetch_channel(self.id) # type: ignore # This is explicit narrowing - - -class Argument: - """Represents an application command argument. - - .. versionadded:: 2.0 - - Attributes - ------------ - type: :class:`~discord.AppCommandOptionType` - The type of argument. - name: :class:`str` - The name of the argument. - description: :class:`str` - The description of the argument. - name_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised names of the argument. Used for display purposes. - description_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised descriptions of the argument. Used for display purposes. - required: :class:`bool` - Whether the argument is required. - choices: List[:class:`Choice`] - A list of choices for the command to choose from for this argument. - parent: Union[:class:`AppCommand`, :class:`AppCommandGroup`] - The parent application command that has this argument. - channel_types: List[:class:`~discord.ChannelType`] - The channel types that are allowed for this parameter. - min_value: Optional[Union[:class:`int`, :class:`float`]] - The minimum supported value for this parameter. - max_value: Optional[Union[:class:`int`, :class:`float`]] - The maximum supported value for this parameter. - min_length: Optional[:class:`int`] - The minimum allowed length for this parameter. - max_length: Optional[:class:`int`] - The maximum allowed length for this parameter. - autocomplete: :class:`bool` - Whether the argument has autocomplete. 
- """ - - __slots__ = ( - 'type', - 'name', - 'description', - 'name_localizations', - 'description_localizations', - 'required', - 'choices', - 'channel_types', - 'min_value', - 'max_value', - 'min_length', - 'max_length', - 'autocomplete', - 'parent', - '_state', - ) - - def __init__( - self, *, parent: ApplicationCommandParent, data: ApplicationCommandOption, state: Optional[ConnectionState] = None - ) -> None: - self._state: Optional[ConnectionState] = state - self.parent: ApplicationCommandParent = parent - self._from_data(data) - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} name={self.name!r} type={self.type!r} required={self.required}>' - - def _from_data(self, data: ApplicationCommandOption) -> None: - self.type: AppCommandOptionType = try_enum(AppCommandOptionType, data['type']) - self.name: str = data['name'] - self.description: str = data['description'] - self.required: bool = data.get('required', False) - self.min_value: Optional[Union[int, float]] = data.get('min_value') - self.max_value: Optional[Union[int, float]] = data.get('max_value') - self.min_length: Optional[int] = data.get('min_length') - self.max_length: Optional[int] = data.get('max_length') - self.autocomplete: bool = data.get('autocomplete', False) - self.channel_types: List[ChannelType] = [try_enum(ChannelType, d) for d in data.get('channel_types', [])] - self.choices: List[Choice[Union[int, float, str]]] = [Choice.from_dict(d) for d in data.get('choices', [])] - self.name_localizations: Dict[Locale, str] = _to_locale_dict(data.get('name_localizations') or {}) - self.description_localizations: Dict[Locale, str] = _to_locale_dict(data.get('description_localizations') or {}) - - def to_dict(self) -> ApplicationCommandOption: - return { - 'name': self.name, - 'type': self.type.value, - 'description': self.description, - 'required': self.required, - 'choices': [choice.to_dict() for choice in self.choices], - 'channel_types': [channel_type.value for channel_type in self.channel_types], - 'min_value': self.min_value, - 'max_value': self.max_value, - 'min_length': self.min_length, - 'max_length': self.max_length, - 'autocomplete': self.autocomplete, - 'options': [], - 'name_localizations': {str(k): v for k, v in self.name_localizations.items()}, - 'description_localizations': {str(k): v for k, v in self.description_localizations.items()}, - } # type: ignore # Type checker does not understand this literal. - - -class AppCommandGroup: - """Represents an application command subcommand. - - .. versionadded:: 2.0 - - Attributes - ------------ - type: :class:`~discord.AppCommandOptionType` - The type of subcommand. - name: :class:`str` - The name of the subcommand. - description: :class:`str` - The description of the subcommand. - name_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised names of the subcommand. Used for display purposes. - description_localizations: Dict[:class:`~discord.Locale`, :class:`str`] - The localised descriptions of the subcommand. Used for display purposes. - options: List[Union[:class:`Argument`, :class:`AppCommandGroup`]] - A list of options. - parent: Union[:class:`AppCommand`, :class:`AppCommandGroup`] - The parent application command. 
- """ - - __slots__ = ( - 'type', - 'name', - 'description', - 'name_localizations', - 'description_localizations', - 'options', - 'parent', - '_state', - ) - - def __init__( - self, *, parent: ApplicationCommandParent, data: ApplicationCommandOption, state: Optional[ConnectionState] = None - ) -> None: - self.parent: ApplicationCommandParent = parent - self._state: Optional[ConnectionState] = state - self._from_data(data) - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} name={self.name!r} type={self.type!r}>' - - @property - def qualified_name(self) -> str: - """:class:`str`: Returns the fully qualified command name. - - The qualified name includes the parent name as well. For example, - in a command like ``/foo bar`` the qualified name is ``foo bar``. - """ - # A B C - # ^ self - # ^ parent - # ^ grandparent - names = [self.name, self.parent.name] - if isinstance(self.parent, AppCommandGroup): - names.append(self.parent.parent.name) - - return ' '.join(reversed(names)) - - @property - def mention(self) -> str: - """:class:`str`: Returns a string that allows you to mention the given AppCommandGroup.""" - if isinstance(self.parent, AppCommand): - base_command = self.parent - else: - base_command = self.parent.parent - return f'' # type: ignore - - def _from_data(self, data: ApplicationCommandOption) -> None: - self.type: AppCommandOptionType = try_enum(AppCommandOptionType, data['type']) - self.name: str = data['name'] - self.description: str = data['description'] - self.options: List[Union[Argument, AppCommandGroup]] = [ - app_command_option_factory(data=d, parent=self, state=self._state) for d in data.get('options', []) - ] - self.name_localizations: Dict[Locale, str] = _to_locale_dict(data.get('name_localizations') or {}) - self.description_localizations: Dict[Locale, str] = _to_locale_dict(data.get('description_localizations') or {}) - - def to_dict(self) -> 'ApplicationCommandOption': - return { - 'name': self.name, - 'type': self.type.value, - 'description': self.description, - 'options': [arg.to_dict() for arg in self.options], - 'name_localizations': {str(k): v for k, v in self.name_localizations.items()}, - 'description_localizations': {str(k): v for k, v in self.description_localizations.items()}, - } # type: ignore # Type checker does not understand this literal. - - -class AppCommandPermissions: - """Represents the permissions for an application command. - - .. versionadded:: 2.0 - - Attributes - ----------- - guild: :class:`~discord.Guild` - The guild associated with this permission. - id: :class:`int` - The ID of the permission target, such as a role, channel, or guild. - The special ``guild_id - 1`` sentinel is used to represent "all channels". - target: Any - The role, user, or channel associated with this permission. This could also be the :class:`AllChannels` sentinel type. - Falls back to :class:`~discord.Object` if the target could not be found in the cache. - type: :class:`.AppCommandPermissionType` - The type of permission. - permission: :class:`bool` - The permission value. ``True`` for allow, ``False`` for deny. 
- """ - - __slots__ = ('id', 'type', 'permission', 'target', 'guild', '_state') - - def __init__(self, *, data: ApplicationCommandPermissions, guild: Guild, state: ConnectionState) -> None: - self._state: ConnectionState = state - self.guild: Guild = guild - - self.id: int = int(data['id']) - self.type: AppCommandPermissionType = try_enum(AppCommandPermissionType, data['type']) - self.permission: bool = data['permission'] - - _object = None - _type = MISSING - - if self.type is AppCommandPermissionType.user: - _object = guild.get_member(self.id) or self._state.get_user(self.id) - _type = Member - elif self.type is AppCommandPermissionType.channel: - if self.id == (guild.id - 1): - _object = AllChannels(guild) - else: - _object = guild.get_channel(self.id) - elif self.type is AppCommandPermissionType.role: - _object = guild.get_role(self.id) - _type = Role - - if _object is None: - _object = Object(id=self.id, type=_type) - - self.target: Union[Object, User, Member, Role, AllChannels, GuildChannel] = _object - - def to_dict(self) -> ApplicationCommandPermissions: - return { - 'id': self.target.id, - 'type': self.type.value, - 'permission': self.permission, - } - - -class GuildAppCommandPermissions: - """Represents the permissions for an application command in a guild. - - .. versionadded:: 2.0 - - Attributes - ----------- - application_id: :class:`int` - The application ID. - command: :class:`.AppCommand` - The application command associated with the permissions. - id: :class:`int` - ID of the command or the application ID. - When this is the application ID instead of a command ID, - the permissions apply to all commands that do not contain explicit overwrites. - guild_id: :class:`int` - The guild ID associated with the permissions. - permissions: List[:class:`AppCommandPermissions`] - The permissions, this is a max of 100. 
- """ - - __slots__ = ('id', 'application_id', 'command', 'guild_id', 'permissions', '_state') - - def __init__(self, *, data: GuildApplicationCommandPermissions, state: ConnectionState, command: AppCommand) -> None: - self._state: ConnectionState = state - self.command: AppCommand = command - - self.id: int = int(data['id']) - self.application_id: int = int(data['application_id']) - self.guild_id: int = int(data['guild_id']) - guild = self.guild - self.permissions: List[AppCommandPermissions] = [ - AppCommandPermissions(data=value, guild=guild, state=self._state) for value in data['permissions'] - ] - - def to_dict(self) -> Dict[str, Any]: - return {'permissions': [p.to_dict() for p in self.permissions]} - - @property - def guild(self) -> Guild: - """:class:`~discord.Guild`: The guild associated with the permissions.""" - return self._state._get_or_create_unavailable_guild(self.guild_id) - - -def app_command_option_factory( - parent: ApplicationCommandParent, data: ApplicationCommandOption, *, state: Optional[ConnectionState] = None -) -> Union[Argument, AppCommandGroup]: - if is_app_command_argument_type(data['type']): - return Argument(parent=parent, data=data, state=state) - else: - return AppCommandGroup(parent=parent, data=data, state=state) diff --git a/.venv/Lib/site-packages/discord/app_commands/namespace.py b/.venv/Lib/site-packages/discord/app_commands/namespace.py deleted file mode 100644 index 7fad617..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/namespace.py +++ /dev/null @@ -1,263 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Dict, Iterable, Iterator, List, NamedTuple, Tuple -from ..member import Member -from ..object import Object -from ..role import Role -from ..message import Message, Attachment -from ..channel import PartialMessageable -from ..enums import AppCommandOptionType -from .models import AppCommandChannel, AppCommandThread - -if TYPE_CHECKING: - from ..interactions import Interaction - from ..types.interactions import ResolvedData, ApplicationCommandInteractionDataOption - -__all__ = ('Namespace',) - - -class ResolveKey(NamedTuple): - id: str - # CommandOptionType does not use 0 or negative numbers so those can be safe for library - # internal use, if necessary. Likewise, only 6, 7, 8, and 11 are actually in use. 
- type: int - - @classmethod - def any_with(cls, id: str) -> ResolveKey: - return ResolveKey(id=id, type=-1) - - def __eq__(self, o: object) -> bool: - if not isinstance(o, ResolveKey): - return NotImplemented - if self.type == -1 or o.type == -1: - return self.id == o.id - return (self.id, self.type) == (o.id, o.type) - - def __hash__(self) -> int: - # Most of the time an ID lookup is all that is necessary - # In case of collision then we look up both the ID and the type. - return hash(self.id) - - -class Namespace: - """An object that holds the parameters being passed to a command in a mostly raw state. - - This class is deliberately simple and just holds the option name and resolved value as a simple - key-pair mapping. These attributes can be accessed using dot notation. For example, an option - with the name of ``example`` can be accessed using ``ns.example``. If an attribute is not found, - then ``None`` is returned rather than an attribute error. - - .. warning:: - - The key names come from the raw Discord data, which means that if a parameter was renamed then the - renamed key is used instead of the function parameter name. - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two namespaces are equal by checking if all attributes are equal. - .. describe:: x != y - - Checks if two namespaces are not equal. - .. describe:: x[key] - - Returns an attribute if it is found, otherwise raises - a :exc:`KeyError`. - .. describe:: key in x - - Checks if the attribute is in the namespace. - .. describe:: iter(x) - - Returns an iterator of ``(name, value)`` pairs. This allows it - to be, for example, constructed as a dict or a list of pairs. - - This namespace object converts resolved objects into their appropriate form depending on their - type. Consult the table below for conversion information. 
- - +-------------------------------------------+-------------------------------------------------------------------------------+ - | Option Type | Resolved Type | - +===========================================+===============================================================================+ - | :attr:`.AppCommandOptionType.string` | :class:`str` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.integer` | :class:`int` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.boolean` | :class:`bool` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.number` | :class:`float` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.user` | :class:`~discord.User` or :class:`~discord.Member` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.channel` | :class:`.AppCommandChannel` or :class:`.AppCommandThread` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.role` | :class:`~discord.Role` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.mentionable` | :class:`~discord.User` or :class:`~discord.Member`, or :class:`~discord.Role` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - | :attr:`.AppCommandOptionType.attachment` | :class:`~discord.Attachment` | - +-------------------------------------------+-------------------------------------------------------------------------------+ - - .. note:: - - In autocomplete interactions, the namespace might not be validated or filled in. Discord does not - send the resolved data as well, so this means that certain fields end up just as IDs rather than - the resolved data. In these cases, a :class:`discord.Object` is returned instead. - - This is a Discord limitation. 
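# --- Illustrative sketch, not part of the deleted file above ---
# The Namespace documented above is exposed as Interaction.namespace and
# supports dot access, item access, containment checks and iteration; missing
# attributes come back as None instead of raising AttributeError.
import discord

def inspect_namespace(interaction: discord.Interaction) -> None:
    ns = interaction.namespace
    if 'member' in ns:           # option names come from the raw Discord payload
        print('member option:', ns.member)
    print(dict(ns))              # iteration yields (name, value) pairs
    print(ns.does_not_exist)     # -> None, no AttributeError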
- """ - - def __init__( - self, - interaction: Interaction, - resolved: ResolvedData, - options: List[ApplicationCommandInteractionDataOption], - ): - completed = self._get_resolved_items(interaction, resolved) - for option in options: - opt_type = option['type'] - name = option['name'] - focused = option.get('focused', False) - if opt_type in (3, 4, 5): # string, integer, boolean - value = option['value'] # type: ignore # Key is there - self.__dict__[name] = value - elif opt_type == 10: # number - value = option['value'] # type: ignore # Key is there - # This condition is written this way because 0 can be a valid float - if value is None or value == '': - self.__dict__[name] = float('nan') - else: - if not focused: - self.__dict__[name] = float(value) - else: - # Autocomplete focused values tend to be garbage in - self.__dict__[name] = value - elif opt_type in (6, 7, 8, 9, 11): - # Remaining ones should be snowflake based ones with resolved data - snowflake: str = option['value'] # type: ignore # Key is there - if opt_type == 9: # Mentionable - # Mentionable is User | Role, these do not cause any conflict - key = ResolveKey.any_with(snowflake) - else: - # The remaining keys can conflict, for example, a role and a channel - # could end up with the same ID in very old guilds since they used to default - # to sharing the guild ID. Old general channels no longer exist, but some old - # servers will still have them so this needs to be handled. - key = ResolveKey(id=snowflake, type=opt_type) - - value = completed.get(key) or Object(id=int(snowflake)) - self.__dict__[name] = value - - @classmethod - def _get_resolved_items(cls, interaction: Interaction, resolved: ResolvedData) -> Dict[ResolveKey, Any]: - completed: Dict[ResolveKey, Any] = {} - state = interaction._state - members = resolved.get('members', {}) - guild_id = interaction.guild_id - guild = state._get_or_create_unavailable_guild(guild_id) if guild_id is not None else None - type = AppCommandOptionType.user.value - for (user_id, user_data) in resolved.get('users', {}).items(): - try: - member_data = members[user_id] - except KeyError: - completed[ResolveKey(id=user_id, type=type)] = state.create_user(user_data) - else: - member_data['user'] = user_data - # Guild ID can't be None in this case. - # There's a type mismatch here that I don't actually care about - member = Member(state=state, guild=guild, data=member_data) # type: ignore - completed[ResolveKey(id=user_id, type=type)] = member - - type = AppCommandOptionType.role.value - completed.update( - { - # The guild ID can't be None in this case. 
- ResolveKey(id=role_id, type=type): Role(guild=guild, state=state, data=role_data) # type: ignore - for role_id, role_data in resolved.get('roles', {}).items() - } - ) - - type = AppCommandOptionType.channel.value - for (channel_id, channel_data) in resolved.get('channels', {}).items(): - key = ResolveKey(id=channel_id, type=type) - if channel_data['type'] in (10, 11, 12): - # The guild ID can't be none in this case - completed[key] = AppCommandThread(state=state, data=channel_data, guild_id=guild_id) # type: ignore - else: - # The guild ID can't be none in this case - completed[key] = AppCommandChannel(state=state, data=channel_data, guild_id=guild_id) # type: ignore - - type = AppCommandOptionType.attachment.value - completed.update( - { - ResolveKey(id=attachment_id, type=type): Attachment(data=attachment_data, state=state) - for attachment_id, attachment_data in resolved.get('attachments', {}).items() - } - ) - - guild = state._get_guild(guild_id) - for (message_id, message_data) in resolved.get('messages', {}).items(): - channel_id = int(message_data['channel_id']) - if guild is None: - channel = PartialMessageable(state=state, guild_id=guild_id, id=channel_id) - else: - channel = guild.get_channel_or_thread(channel_id) or PartialMessageable( - state=state, guild_id=guild_id, id=channel_id - ) - - # Type checker doesn't understand this due to failure to narrow - message = Message(state=state, channel=channel, data=message_data) # type: ignore - key = ResolveKey(id=message_id, type=-1) - completed[key] = message - - return completed - - def __repr__(self) -> str: - items = (f'{k}={v!r}' for k, v in self.__dict__.items()) - return '<{} {}>'.format(self.__class__.__name__, ' '.join(items)) - - def __eq__(self, other: object) -> bool: - if isinstance(self, Namespace) and isinstance(other, Namespace): - return self.__dict__ == other.__dict__ - return NotImplemented - - def __getitem__(self, key: str) -> Any: - return self.__dict__[key] - - def __contains__(self, key: str) -> Any: - return key in self.__dict__ - - def __getattr__(self, attr: str) -> Any: - return None - - def __iter__(self) -> Iterator[Tuple[str, Any]]: - yield from self.__dict__.items() - - def _update_with_defaults(self, defaults: Iterable[Tuple[str, Any]]) -> None: - for key, value in defaults: - self.__dict__.setdefault(key, value) diff --git a/.venv/Lib/site-packages/discord/app_commands/transformers.py b/.venv/Lib/site-packages/discord/app_commands/transformers.py deleted file mode 100644 index 8f00918..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/transformers.py +++ /dev/null @@ -1,877 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -import inspect - -from dataclasses import dataclass -from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Callable, - ClassVar, - Coroutine, - Dict, - List, - Literal, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -from .errors import AppCommandError, TransformerError -from .models import AppCommandChannel, AppCommandThread, Choice -from .translator import TranslationContextLocation, TranslationContext, Translator, locale_str -from ..channel import StageChannel, VoiceChannel, TextChannel, CategoryChannel, ForumChannel -from ..abc import GuildChannel -from ..threads import Thread -from ..enums import Enum as InternalEnum, AppCommandOptionType, ChannelType, Locale -from ..utils import MISSING, maybe_coroutine -from ..user import User -from ..role import Role -from ..member import Member -from ..message import Attachment - -__all__ = ( - 'Transformer', - 'Transform', - 'Range', -) - -T = TypeVar('T') -FuncT = TypeVar('FuncT', bound=Callable[..., Any]) -ChoiceT = TypeVar('ChoiceT', str, int, float, Union[str, int, float]) -NoneType = type(None) - -if TYPE_CHECKING: - from ..interactions import Interaction - from .commands import Parameter - - -@dataclass -class CommandParameter: - # The name of the parameter is *always* the parameter name in the code - # Therefore, it can't be Union[str, locale_str] - name: str = MISSING - description: Union[str, locale_str] = MISSING - required: bool = MISSING - default: Any = MISSING - choices: List[Choice[Union[str, int, float]]] = MISSING - type: AppCommandOptionType = MISSING - channel_types: List[ChannelType] = MISSING - min_value: Optional[Union[int, float]] = None - max_value: Optional[Union[int, float]] = None - autocomplete: Optional[Callable[..., Coroutine[Any, Any, Any]]] = None - _rename: Union[str, locale_str] = MISSING - _annotation: Any = MISSING - - async def get_translated_payload(self, translator: Translator, data: Parameter) -> Dict[str, Any]: - base = self.to_dict() - - rename = self._rename - description = self.description - needs_name_translations = isinstance(rename, locale_str) - needs_description_translations = isinstance(description, locale_str) - name_localizations: Dict[str, str] = {} - description_localizations: Dict[str, str] = {} - - # Prevent creating these objects in a heavy loop - name_context = TranslationContext(location=TranslationContextLocation.parameter_name, data=data) - description_context = TranslationContext(location=TranslationContextLocation.parameter_description, data=data) - for locale in Locale: - if needs_name_translations: - translation = await translator._checked_translate(rename, locale, name_context) - if translation is not None: - name_localizations[locale.value] = translation - - if needs_description_translations: - translation = await translator._checked_translate(description, locale, description_context) - if translation is not None: - description_localizations[locale.value] = translation - - if self.choices: - base['choices'] = [await choice.get_translated_payload(translator) for choice in self.choices] - - if name_localizations: - base['name_localizations'] = name_localizations - - if description_localizations: - base['description_localizations'] = 
description_localizations - - return base - - def to_dict(self) -> Dict[str, Any]: - base = { - 'type': self.type.value, - 'name': self.display_name, - 'description': str(self.description), - 'required': self.required, - } - - if self.choices: - base['choices'] = [choice.to_dict() for choice in self.choices] - if self.channel_types: - base['channel_types'] = [t.value for t in self.channel_types] - if self.autocomplete: - base['autocomplete'] = True - - min_key, max_key = ( - ('min_value', 'max_value') if self.type is not AppCommandOptionType.string else ('min_length', 'max_length') - ) - if self.min_value is not None: - base[min_key] = self.min_value - if self.max_value is not None: - base[max_key] = self.max_value - - return base - - def _convert_to_locale_strings(self) -> None: - if self._rename is MISSING: - self._rename = locale_str(self.name) - elif isinstance(self._rename, str): - self._rename = locale_str(self._rename) - - if isinstance(self.description, str): - self.description = locale_str(self.description) - - if self.choices: - for choice in self.choices: - if choice._locale_name is None: - choice._locale_name = locale_str(choice.name) - - def is_choice_annotation(self) -> bool: - return getattr(self._annotation, '__discord_app_commands_is_choice__', False) - - async def transform(self, interaction: Interaction, value: Any, /) -> Any: - if hasattr(self._annotation, '__discord_app_commands_transformer__'): - # This one needs special handling for type safety reasons - if self._annotation.__discord_app_commands_is_choice__: - choice = next((c for c in self.choices if c.value == value), None) - if choice is None: - raise TransformerError(value, self.type, self._annotation) - return choice - - try: - # ParamSpec doesn't understand that transform is a callable since it's unbound - return await maybe_coroutine(self._annotation.transform, interaction, value) # type: ignore - except AppCommandError: - raise - except Exception as e: - raise TransformerError(value, self.type, self._annotation) from e - - return value - - @property - def display_name(self) -> str: - """:class:`str`: The name of the parameter as it should be displayed to the user.""" - return self.name if self._rename is MISSING else str(self._rename) - - -class Transformer: - """The base class that allows a type annotation in an application command parameter - to map into a :class:`~discord.AppCommandOptionType` and transform the raw value into one - from this type. - - This class is customisable through the overriding of methods and properties in the class - and by using it as the second type parameter of the :class:`~discord.app_commands.Transform` - class. For example, to convert a string into a custom pair type: - - .. code-block:: python3 - - class Point(typing.NamedTuple): - x: int - y: int - - class PointTransformer(app_commands.Transformer): - async def transform(self, interaction: discord.Interaction, value: str) -> Point: - (x, _, y) = value.partition(',') - return Point(x=int(x.strip()), y=int(y.strip())) - - @app_commands.command() - async def graph( - interaction: discord.Interaction, - point: app_commands.Transform[Point, PointTransformer], - ): - await interaction.response.send_message(str(point)) - - If a class is passed instead of an instance to the second type parameter, then it is - constructed with no arguments passed to the ``__init__`` method. - - .. 
versionadded:: 2.0 - """ - - __discord_app_commands_transformer__: ClassVar[bool] = True - __discord_app_commands_is_choice__: ClassVar[bool] = False - - # This is needed to pass typing's type checks. - # e.g. Optional[MyTransformer] - def __call__(self) -> None: - pass - - def __or__(self, rhs: Any) -> Any: - return Union[self, rhs] # type: ignore - - @property - def type(self) -> AppCommandOptionType: - """:class:`~discord.AppCommandOptionType`: The option type associated with this transformer. - - This must be a :obj:`property`. - - Defaults to :attr:`~discord.AppCommandOptionType.string`. - """ - return AppCommandOptionType.string - - @property - def channel_types(self) -> List[ChannelType]: - """List[:class:`~discord.ChannelType`]: A list of channel types that are allowed to this parameter. - - Only valid if the :meth:`type` returns :attr:`~discord.AppCommandOptionType.channel`. - - This must be a :obj:`property`. - - Defaults to an empty list. - """ - return [] - - @property - def min_value(self) -> Optional[Union[int, float]]: - """Optional[:class:`int`]: The minimum supported value for this parameter. - - Only valid if the :meth:`type` returns :attr:`~discord.AppCommandOptionType.number` - :attr:`~discord.AppCommandOptionType.integer`, or :attr:`~discord.AppCommandOptionType.string`. - - This must be a :obj:`property`. - - Defaults to ``None``. - """ - return None - - @property - def max_value(self) -> Optional[Union[int, float]]: - """Optional[:class:`int`]: The maximum supported value for this parameter. - - Only valid if the :meth:`type` returns :attr:`~discord.AppCommandOptionType.number` - :attr:`~discord.AppCommandOptionType.integer`, or :attr:`~discord.AppCommandOptionType.string`. - - This must be a :obj:`property`. - - Defaults to ``None``. - """ - return None - - @property - def choices(self) -> Optional[List[Choice[Union[int, float, str]]]]: - """Optional[List[:class:`~discord.app_commands.Choice`]]: A list of up to 25 choices that are allowed to this parameter. - - Only valid if the :meth:`type` returns :attr:`~discord.AppCommandOptionType.number` - :attr:`~discord.AppCommandOptionType.integer`, or :attr:`~discord.AppCommandOptionType.string`. - - This must be a :obj:`property`. - - Defaults to ``None``. - """ - return None - - @property - def _error_display_name(self) -> str: - name = self.__class__.__name__ - if name.endswith('Transformer'): - return name[:-11] - else: - return name - - async def transform(self, interaction: Interaction, value: Any, /) -> Any: - """|maybecoro| - - Transforms the converted option value into another value. - - The value passed into this transform function is the same as the - one in the :class:`conversion table `. - - Parameters - ----------- - interaction: :class:`~discord.Interaction` - The interaction being handled. - value: Any - The value of the given argument after being resolved. - See the :class:`conversion table ` - for how certain option types correspond to certain values. - """ - raise NotImplementedError('Derived classes need to implement this.') - - async def autocomplete( - self, interaction: Interaction, value: Union[int, float, str], / - ) -> List[Choice[Union[int, float, str]]]: - """|coro| - - An autocomplete prompt handler to be automatically used by options using this transformer. - - .. 
note:: - - Autocomplete is only supported for options with a :meth:`~discord.app_commands.Transformer.type` - of :attr:`~discord.AppCommandOptionType.string`, :attr:`~discord.AppCommandOptionType.integer`, - or :attr:`~discord.AppCommandOptionType.number`. - - Parameters - ----------- - interaction: :class:`~discord.Interaction` - The autocomplete interaction being handled. - value: Union[:class:`str`, :class:`int`, :class:`float`] - The current value entered by the user. - - Returns - -------- - List[:class:`~discord.app_commands.Choice`] - A list of choices to be displayed to the user, a maximum of 25. - - """ - raise NotImplementedError('Derived classes can implement this.') - - -class IdentityTransformer(Transformer): - def __init__(self, type: AppCommandOptionType) -> None: - self._type = type - - @property - def type(self) -> AppCommandOptionType: - return self._type - - async def transform(self, interaction: Interaction, value: Any, /) -> Any: - return value - - -class RangeTransformer(IdentityTransformer): - def __init__( - self, - opt_type: AppCommandOptionType, - *, - min: Optional[Union[int, float]] = None, - max: Optional[Union[int, float]] = None, - ) -> None: - if min and max and min > max: - raise TypeError('minimum cannot be larger than maximum') - - self._min: Optional[Union[int, float]] = min - self._max: Optional[Union[int, float]] = max - super().__init__(opt_type) - - @property - def min_value(self) -> Optional[Union[int, float]]: - return self._min - - @property - def max_value(self) -> Optional[Union[int, float]]: - return self._max - - -class LiteralTransformer(IdentityTransformer): - def __init__(self, values: Tuple[Any, ...]) -> None: - first = type(values[0]) - if first is int: - opt_type = AppCommandOptionType.integer - elif first is float: - opt_type = AppCommandOptionType.number - elif first is str: - opt_type = AppCommandOptionType.string - else: - raise TypeError(f'expected int, str, or float values not {first!r}') - - self._choices = [Choice(name=str(v), value=v) for v in values] - super().__init__(opt_type) - - @property - def choices(self): - return self._choices - - -class ChoiceTransformer(IdentityTransformer): - __discord_app_commands_is_choice__: ClassVar[bool] = True - - def __init__(self, inner_type: Any) -> None: - if inner_type is int: - opt_type = AppCommandOptionType.integer - elif inner_type is float: - opt_type = AppCommandOptionType.number - elif inner_type is str: - opt_type = AppCommandOptionType.string - else: - raise TypeError(f'expected int, str, or float values not {inner_type!r}') - - super().__init__(opt_type) - - -class EnumValueTransformer(Transformer): - def __init__(self, enum: Any) -> None: - super().__init__() - - values = list(enum) - if len(values) < 2: - raise TypeError('enum.Enum requires at least two values.') - - first = type(values[0].value) - if first is int: - opt_type = AppCommandOptionType.integer - elif first is float: - opt_type = AppCommandOptionType.number - elif first is str: - opt_type = AppCommandOptionType.string - else: - raise TypeError(f'expected int, str, or float values not {first!r}') - - self._type: AppCommandOptionType = opt_type - self._enum: Any = enum - self._choices = [Choice(name=v.name, value=v.value) for v in values] - - @property - def _error_display_name(self) -> str: - return self._enum.__name__ - - @property - def type(self) -> AppCommandOptionType: - return self._type - - @property - def choices(self): - return self._choices - - async def transform(self, interaction: Interaction, value: 
Any, /) -> Any: - return self._enum(value) - - -class EnumNameTransformer(Transformer): - def __init__(self, enum: Any) -> None: - super().__init__() - - values = list(enum) - if len(values) < 2: - raise TypeError('enum.Enum requires at least two values.') - - self._enum: Any = enum - self._choices = [Choice(name=v.name, value=v.name) for v in values] - - @property - def _error_display_name(self) -> str: - return self._enum.__name__ - - @property - def type(self) -> AppCommandOptionType: - return AppCommandOptionType.string - - @property - def choices(self): - return self._choices - - async def transform(self, interaction: Interaction, value: Any, /) -> Any: - return self._enum[value] - - -class InlineTransformer(Transformer): - def __init__(self, annotation: Any) -> None: - super().__init__() - self.annotation: Any = annotation - - @property - def _error_display_name(self) -> str: - return self.annotation.__name__ - - @property - def type(self) -> AppCommandOptionType: - return AppCommandOptionType.string - - async def transform(self, interaction: Interaction, value: Any, /) -> Any: - return await self.annotation.transform(interaction, value) - - -if TYPE_CHECKING: - from typing_extensions import Annotated as Transform - from typing_extensions import Annotated as Range -else: - - class Transform: - """A type annotation that can be applied to a parameter to customise the behaviour of - an option type by transforming with the given :class:`Transformer`. This requires - the usage of two generic parameters, the first one is the type you're converting to and the second - one is the type of the :class:`Transformer` actually doing the transformation. - - During type checking time this is equivalent to :obj:`typing.Annotated` so type checkers understand - the intent of the code. - - For example usage, check :class:`Transformer`. - - .. versionadded:: 2.0 - """ - - def __class_getitem__(cls, items) -> _TransformMetadata: - if not isinstance(items, tuple): - raise TypeError(f'expected tuple for arguments, received {items.__class__.__name__} instead') - - if len(items) != 2: - raise TypeError('Transform only accepts exactly two arguments') - - _, transformer = items - - if inspect.isclass(transformer): - if not issubclass(transformer, Transformer): - raise TypeError(f'second argument of Transform must be a Transformer class not {transformer!r}') - transformer = transformer() - elif not isinstance(transformer, Transformer): - raise TypeError(f'second argument of Transform must be a Transformer not {transformer.__class__.__name__}') - - return transformer - - class Range: - """A type annotation that can be applied to a parameter to require a numeric or string - type to fit within the range provided. - - During type checking time this is equivalent to :obj:`typing.Annotated` so type checkers understand - the intent of the code. - - Some example ranges: - - - ``Range[int, 10]`` means the minimum is 10 with no maximum. - - ``Range[int, None, 10]`` means the maximum is 10 with no minimum. - - ``Range[int, 1, 10]`` means the minimum is 1 and the maximum is 10. - - ``Range[float, 1.0, 5.0]`` means the minimum is 1.0 and the maximum is 5.0. - - ``Range[str, 1, 10]`` means the minimum length is 1 and the maximum length is 10. - - .. versionadded:: 2.0 - - Examples - ---------- - - .. 
code-block:: python3 - - @app_commands.command() - async def range(interaction: discord.Interaction, value: app_commands.Range[int, 10, 12]): - await interaction.response.send_message(f'Your value is {value}', ephemeral=True) - """ - - def __class_getitem__(cls, obj) -> _TransformMetadata: - if not isinstance(obj, tuple): - raise TypeError(f'expected tuple for arguments, received {obj.__class__.__name__} instead') - - if len(obj) == 2: - obj = (*obj, None) - elif len(obj) != 3: - raise TypeError('Range accepts either two or three arguments with the first being the type of range.') - - obj_type, min, max = obj - - if min is None and max is None: - raise TypeError('Range must not be empty') - - if min is not None and max is not None: - # At this point max and min are both not none - if type(min) != type(max): - raise TypeError('Both min and max in Range must be the same type') - - if obj_type is int: - opt_type = AppCommandOptionType.integer - elif obj_type is float: - opt_type = AppCommandOptionType.number - elif obj_type is str: - opt_type = AppCommandOptionType.string - else: - raise TypeError(f'expected int, float, or str as range type, received {obj_type!r} instead') - - if obj_type in (str, int): - cast = int - else: - cast = float - - transformer = RangeTransformer( - opt_type, - min=cast(min) if min is not None else None, - max=cast(max) if max is not None else None, - ) - return transformer - - -class MemberTransformer(Transformer): - @property - def type(self) -> AppCommandOptionType: - return AppCommandOptionType.user - - async def transform(self, interaction: Interaction, value: Any, /) -> Member: - if not isinstance(value, Member): - raise TransformerError(value, self.type, self) - return value - - -class BaseChannelTransformer(Transformer): - def __init__(self, *channel_types: Type[Any]) -> None: - super().__init__() - if len(channel_types) == 1: - display_name = channel_types[0].__name__ - types = CHANNEL_TO_TYPES[channel_types[0]] - else: - display_name = '{}, and {}'.format(', '.join(t.__name__ for t in channel_types[:-1]), channel_types[-1].__name__) - types = [] - - for t in channel_types: - try: - types.extend(CHANNEL_TO_TYPES[t]) - except KeyError: - raise TypeError('Union type of channels must be entirely made up of channels') from None - - self._types: Tuple[Type[Any]] = channel_types - self._channel_types: List[ChannelType] = types - self._display_name = display_name - - @property - def _error_display_name(self) -> str: - return self._display_name - - @property - def type(self) -> AppCommandOptionType: - return AppCommandOptionType.channel - - @property - def channel_types(self) -> List[ChannelType]: - return self._channel_types - - async def transform(self, interaction: Interaction, value: Any, /): - resolved = value.resolve() - if resolved is None or not isinstance(resolved, self._types): - raise TransformerError(value, AppCommandOptionType.channel, self) - return resolved - - -class RawChannelTransformer(BaseChannelTransformer): - async def transform(self, interaction: Interaction, value: Any, /): - if not isinstance(value, self._types): - raise TransformerError(value, AppCommandOptionType.channel, self) - return value - - -class UnionChannelTransformer(BaseChannelTransformer): - async def transform(self, interaction: Interaction, value: Any, /): - if isinstance(value, self._types): - return value - - resolved = value.resolve() - if resolved is None or not isinstance(resolved, self._types): - raise TransformerError(value, AppCommandOptionType.channel, self) - 
return resolved - - -CHANNEL_TO_TYPES: Dict[Any, List[ChannelType]] = { - AppCommandChannel: [ - ChannelType.stage_voice, - ChannelType.voice, - ChannelType.text, - ChannelType.news, - ChannelType.category, - ChannelType.forum, - ], - GuildChannel: [ - ChannelType.stage_voice, - ChannelType.voice, - ChannelType.text, - ChannelType.news, - ChannelType.category, - ChannelType.forum, - ], - AppCommandThread: [ChannelType.news_thread, ChannelType.private_thread, ChannelType.public_thread], - Thread: [ChannelType.news_thread, ChannelType.private_thread, ChannelType.public_thread], - StageChannel: [ChannelType.stage_voice], - VoiceChannel: [ChannelType.voice], - TextChannel: [ChannelType.text, ChannelType.news], - CategoryChannel: [ChannelType.category], - ForumChannel: [ChannelType.forum], -} - -BUILT_IN_TRANSFORMERS: Dict[Any, Transformer] = { - str: IdentityTransformer(AppCommandOptionType.string), - int: IdentityTransformer(AppCommandOptionType.integer), - float: IdentityTransformer(AppCommandOptionType.number), - bool: IdentityTransformer(AppCommandOptionType.boolean), - User: IdentityTransformer(AppCommandOptionType.user), - Member: MemberTransformer(), - Role: IdentityTransformer(AppCommandOptionType.role), - AppCommandChannel: RawChannelTransformer(AppCommandChannel), - AppCommandThread: RawChannelTransformer(AppCommandThread), - GuildChannel: BaseChannelTransformer(GuildChannel), - Thread: BaseChannelTransformer(Thread), - StageChannel: BaseChannelTransformer(StageChannel), - VoiceChannel: BaseChannelTransformer(VoiceChannel), - TextChannel: BaseChannelTransformer(TextChannel), - CategoryChannel: BaseChannelTransformer(CategoryChannel), - ForumChannel: BaseChannelTransformer(ForumChannel), - Attachment: IdentityTransformer(AppCommandOptionType.attachment), -} - -ALLOWED_DEFAULTS: Dict[AppCommandOptionType, Tuple[Type[Any], ...]] = { - AppCommandOptionType.string: (str, NoneType), - AppCommandOptionType.integer: (int, NoneType), - AppCommandOptionType.boolean: (bool, NoneType), - AppCommandOptionType.number: (float, NoneType), -} - - -def get_supported_annotation( - annotation: Any, - *, - _none: type = NoneType, - _mapping: Dict[Any, Transformer] = BUILT_IN_TRANSFORMERS, -) -> Tuple[Any, Any, bool]: - """Returns an appropriate, yet supported, annotation along with an optional default value. - - The third boolean element of the tuple indicates if default values should be validated. - - This differs from the built in mapping by supporting a few more things. - Likewise, this returns a "transformed" annotation that is ready to use with CommandParameter.transform. 
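    For illustration, a few annotations and the tuples this helper produces, derived from the built-in mapping above and the branches that follow:

    .. code-block:: python3

        get_supported_annotation(str)
        # -> (IdentityTransformer(AppCommandOptionType.string), MISSING, True)

        get_supported_annotation(Optional[Member])
        # -> (MemberTransformer(), None, True); Optional unwraps and supplies a None default

        get_supported_annotation(Literal['red', 'green'])
        # -> (LiteralTransformer(('red', 'green')), MISSING, True)

        get_supported_annotation(Union[Member, Role])
        # -> (IdentityTransformer(AppCommandOptionType.mentionable), MISSING, True)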
- """ - - try: - return (_mapping[annotation], MISSING, True) - except (KeyError, TypeError): - pass - - if isinstance(annotation, Transformer): - return (annotation, MISSING, False) - - if inspect.isclass(annotation): - if issubclass(annotation, Transformer): - return (annotation(), MISSING, False) - if issubclass(annotation, (Enum, InternalEnum)): - if all(isinstance(v.value, (str, int, float)) for v in annotation): - return (EnumValueTransformer(annotation), MISSING, False) - else: - return (EnumNameTransformer(annotation), MISSING, False) - if annotation is Choice: - raise TypeError('Choice requires a type argument of int, str, or float') - - # Check if a transform @classmethod is given to the class - # These flatten into simple "inline" transformers with implicit strings - transform_classmethod = annotation.__dict__.get('transform', None) - if isinstance(transform_classmethod, classmethod): - params = inspect.signature(transform_classmethod.__func__).parameters - if len(params) != 3: - raise TypeError('Inline transformer with transform classmethod requires 3 parameters') - if not inspect.iscoroutinefunction(transform_classmethod.__func__): - raise TypeError('Inline transformer with transform classmethod must be a coroutine') - return (InlineTransformer(annotation), MISSING, False) - - # Check if there's an origin - origin = getattr(annotation, '__origin__', None) - if origin is Literal: - args = annotation.__args__ # type: ignore - return (LiteralTransformer(args), MISSING, True) - - if origin is Choice: - arg = annotation.__args__[0] # type: ignore - return (ChoiceTransformer(arg), MISSING, True) - - if origin is not Union: - # Only Union/Optional is supported right now so bail early - raise TypeError(f'unsupported type annotation {annotation!r}') - - default = MISSING - args = annotation.__args__ # type: ignore - if args[-1] is _none: - if len(args) == 2: - underlying = args[0] - inner, _, validate_default = get_supported_annotation(underlying) - if inner is None: - raise TypeError(f'unsupported inner optional type {underlying!r}') - return (inner, None, validate_default) - else: - args = args[:-1] - default = None - - # Check for channel union types - if any(arg in CHANNEL_TO_TYPES for arg in args): - # If any channel type is given, then *all* must be channel types - return (UnionChannelTransformer(*args), default, True) - - # The only valid transformations here are: - # [Member, User] => user - # [Member, User, Role] => mentionable - # [Member | User, Role] => mentionable - supported_types: Set[Any] = {Role, Member, User} - if not all(arg in supported_types for arg in args): - raise TypeError(f'unsupported types given inside {annotation!r}') - if args == (User, Member) or args == (Member, User): - return (IdentityTransformer(AppCommandOptionType.user), default, True) - - return (IdentityTransformer(AppCommandOptionType.mentionable), default, True) - - -def annotation_to_parameter(annotation: Any, parameter: inspect.Parameter) -> CommandParameter: - """Returns the appropriate :class:`CommandParameter` for the given annotation. - - The resulting ``_annotation`` attribute might not match the one given here and might - be transformed in order to be easier to call from the ``transform`` asynchronous function - of a command parameter. 
- """ - - (inner, default, validate_default) = get_supported_annotation(annotation) - type = inner.type - - if default is MISSING or default is None: - param_default = parameter.default - if param_default is not parameter.empty: - default = param_default - - # Verify validity of the default parameter - if default is not MISSING and validate_default: - valid_types: Tuple[Any, ...] = ALLOWED_DEFAULTS.get(type, (NoneType,)) - if not isinstance(default, valid_types): - raise TypeError(f'invalid default parameter type given ({default.__class__}), expected {valid_types}') - - result = CommandParameter( - type=type, - _annotation=inner, - default=default, - required=default is MISSING, - name=parameter.name, - ) - - choices = inner.choices - if choices is not None: - result.choices = choices - - # These methods should be duck typed - if type in (AppCommandOptionType.number, AppCommandOptionType.string, AppCommandOptionType.integer): - result.min_value = inner.min_value - result.max_value = inner.max_value - - if type is AppCommandOptionType.channel: - result.channel_types = inner.channel_types - - if parameter.kind in (parameter.POSITIONAL_ONLY, parameter.VAR_KEYWORD, parameter.VAR_POSITIONAL): - raise TypeError(f'unsupported parameter kind in callback: {parameter.kind!s}') - - # Check if the method is overridden - if inner.autocomplete.__func__ is not Transformer.autocomplete: - from .commands import validate_auto_complete_callback - - result.autocomplete = validate_auto_complete_callback(inner.autocomplete) - - return result diff --git a/.venv/Lib/site-packages/discord/app_commands/translator.py b/.venv/Lib/site-packages/discord/app_commands/translator.py deleted file mode 100644 index 1741054..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/translator.py +++ /dev/null @@ -1,305 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations -from typing import TYPE_CHECKING, Any, Generic, Literal, Optional, TypeVar, Union, overload -from .errors import TranslationError -from ..enums import Enum, Locale - - -if TYPE_CHECKING: - from .commands import Command, ContextMenu, Group, Parameter - from .models import Choice - - -__all__ = ( - 'TranslationContextLocation', - 'TranslationContextTypes', - 'TranslationContext', - 'Translator', - 'locale_str', -) - - -class TranslationContextLocation(Enum): - command_name = 0 - command_description = 1 - group_name = 2 - group_description = 3 - parameter_name = 4 - parameter_description = 5 - choice_name = 6 - other = 7 - - -_L = TypeVar('_L', bound=TranslationContextLocation) -_D = TypeVar('_D') - - -class TranslationContext(Generic[_L, _D]): - """A class that provides context for the :class:`locale_str` being translated. - - This is useful to determine where exactly the string is located and aid in looking - up the actual translation. - - Attributes - ----------- - location: :class:`TranslationContextLocation` - The location where this string is located. - data: Any - The extraneous data that is being translated. - """ - - __slots__ = ('location', 'data') - - @overload - def __init__( - self, location: Literal[TranslationContextLocation.command_name], data: Union[Command[Any, ..., Any], ContextMenu] - ) -> None: - ... - - @overload - def __init__( - self, location: Literal[TranslationContextLocation.command_description], data: Command[Any, ..., Any] - ) -> None: - ... - - @overload - def __init__( - self, - location: Literal[TranslationContextLocation.group_name, TranslationContextLocation.group_description], - data: Group, - ) -> None: - ... - - @overload - def __init__( - self, - location: Literal[TranslationContextLocation.parameter_name, TranslationContextLocation.parameter_description], - data: Parameter, - ) -> None: - ... - - @overload - def __init__(self, location: Literal[TranslationContextLocation.choice_name], data: Choice[Any]) -> None: - ... - - @overload - def __init__(self, location: Literal[TranslationContextLocation.other], data: Any) -> None: - ... 
- - def __init__(self, location: _L, data: _D) -> None: - self.location: _L = location - self.data: _D = data - - -# For type checking purposes, it makes sense to allow the user to leverage type narrowing -# So code like this works as expected: -# -# if context.type == TranslationContextLocation.command_name: -# reveal_type(context.data) # Revealed type is Command | ContextMenu -# -# This requires a union of types -CommandNameTranslationContext = TranslationContext[ - Literal[TranslationContextLocation.command_name], Union['Command[Any, ..., Any]', 'ContextMenu'] -] -CommandDescriptionTranslationContext = TranslationContext[ - Literal[TranslationContextLocation.command_description], 'Command[Any, ..., Any]' -] -GroupTranslationContext = TranslationContext[ - Literal[TranslationContextLocation.group_name, TranslationContextLocation.group_description], 'Group' -] -ParameterTranslationContext = TranslationContext[ - Literal[TranslationContextLocation.parameter_name, TranslationContextLocation.parameter_description], 'Parameter' -] -ChoiceTranslationContext = TranslationContext[Literal[TranslationContextLocation.choice_name], 'Choice[Any]'] -OtherTranslationContext = TranslationContext[Literal[TranslationContextLocation.other], Any] - -TranslationContextTypes = Union[ - CommandNameTranslationContext, - CommandDescriptionTranslationContext, - GroupTranslationContext, - ParameterTranslationContext, - ChoiceTranslationContext, - OtherTranslationContext, -] - - -class Translator: - """A class that handles translations for commands, parameters, and choices. - - Translations are done lazily in order to allow for async enabled translations as well - as supporting a wide array of translation systems such as :mod:`gettext` and - `Project Fluent `_. - - In order for a translator to be used, it must be set using the :meth:`CommandTree.set_translator` - method. The translation flow for a string is as follows: - - 1. Use :class:`locale_str` instead of :class:`str` in areas of a command you want to be translated. - - Currently, these are command names, command descriptions, parameter names, parameter descriptions, and choice names. - - This can also be used inside the :func:`~discord.app_commands.describe` decorator. - 2. Call :meth:`CommandTree.set_translator` to the translator instance that will handle the translations. - 3. Call :meth:`CommandTree.sync` - 4. The library will call :meth:`Translator.translate` on all the relevant strings being translated. - - .. versionadded:: 2.0 - """ - - async def load(self) -> None: - """|coro| - - An asynchronous setup function for loading the translation system. - - The default implementation does nothing. - - This is invoked when :meth:`CommandTree.set_translator` is called. - """ - pass - - async def unload(self) -> None: - """|coro| - - An asynchronous teardown function for unloading the translation system. - - The default implementation does nothing. - - This is invoked when :meth:`CommandTree.set_translator` is called - if a tree already has a translator or when :meth:`discord.Client.close` is called. 
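    As a minimal sketch of that flow (the in-memory catalogue stands in for a real backend such as :mod:`gettext` or Fluent):

    .. code-block:: python3

        class MyTranslator(app_commands.Translator):
            async def load(self) -> None:
                # A real implementation would open its catalogues here.
                self.catalogue = {'ban': {discord.Locale.german: 'verbannen'}}

            async def translate(
                self,
                string: app_commands.locale_str,
                locale: discord.Locale,
                context: app_commands.TranslationContextTypes,
            ) -> Optional[str]:
                # Returning None keeps the untranslated default message.
                return self.catalogue.get(string.message, {}).get(locale)

        # Registered before syncing, e.g.:
        #   await tree.set_translator(MyTranslator())
        #   await tree.sync()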
- """ - pass - - async def _checked_translate( - self, string: locale_str, locale: Locale, context: TranslationContextTypes - ) -> Optional[str]: - try: - return await self.translate(string, locale, context) - except TranslationError: - raise - except Exception as e: - raise TranslationError(string=string, locale=locale, context=context) from e - - async def translate(self, string: locale_str, locale: Locale, context: TranslationContextTypes) -> Optional[str]: - """|coro| - - Translates the given string to the specified locale. - - If the string cannot be translated, ``None`` should be returned. - - The default implementation returns ``None``. - - If an exception is raised in this method, it should inherit from :exc:`TranslationError`. - If it doesn't, then when this is called the exception will be chained with it instead. - - Parameters - ------------ - string: :class:`locale_str` - The string being translated. - locale: :class:`~discord.Locale` - The locale being requested for translation. - context: :class:`TranslationContext` - The translation context where the string originated from. - For better type checking ergonomics, the ``TranslationContextTypes`` - type can be used instead to aid with type narrowing. It is functionally - equivalent to :class:`TranslationContext`. - """ - - return None - - -class locale_str: - """Marks a string as ready for translation. - - This is done lazily and is not actually translated until :meth:`CommandTree.sync` is called. - - The sync method then ultimately defers the responsibility of translating to the :class:`Translator` - instance used by the :class:`CommandTree`. For more information on the translation flow, see the - :class:`Translator` documentation. - - .. container:: operations - - .. describe:: str(x) - - Returns the message passed to the string. - - .. describe:: x == y - - Checks if the string is equal to another string. - - .. describe:: x != y - - Checks if the string is not equal to another string. - - .. describe:: hash(x) - - Returns the hash of the string. - - .. versionadded:: 2.0 - - Attributes - ------------ - message: :class:`str` - The message being translated. Once set, this cannot be changed. - - .. warning:: - - This must be the default "message" that you send to Discord. - Discord sends this message back to the library and the library - uses it to access the data in order to dispatch commands. - - For example, in a command name context, if the command - name is ``foo`` then the message *must* also be ``foo``. - For other translation systems that require a message ID such - as Fluent, consider using a keyword argument to pass it in. - extras: :class:`dict` - A dict of user provided extras to attach to the translated string. - This can be used to add more context, information, or any metadata necessary - to aid in actually translating the string. - - Since these are passed via keyword arguments, the keys are strings. 
- """ - - __slots__ = ('__message', 'extras') - - def __init__(self, message: str, /, **kwargs: Any) -> None: - self.__message: str = message - self.extras: dict[str, Any] = kwargs - - @property - def message(self) -> str: - return self.__message - - def __str__(self) -> str: - return self.__message - - def __repr__(self) -> str: - kwargs = ', '.join(f'{k}={v!r}' for k, v in self.extras.items()) - if kwargs: - return f'{self.__class__.__name__}({self.__message!r}, {kwargs})' - return f'{self.__class__.__name__}({self.__message!r})' - - def __eq__(self, obj: object) -> bool: - return isinstance(obj, locale_str) and self.message == obj.message - - def __hash__(self) -> int: - return hash(self.__message) diff --git a/.venv/Lib/site-packages/discord/app_commands/tree.py b/.venv/Lib/site-packages/discord/app_commands/tree.py deleted file mode 100644 index 5bdfbec..0000000 --- a/.venv/Lib/site-packages/discord/app_commands/tree.py +++ /dev/null @@ -1,1255 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations -import logging -import inspect - -from typing import ( - Any, - TYPE_CHECKING, - Callable, - Coroutine, - Dict, - Generator, - Generic, - List, - Literal, - Optional, - Sequence, - Set, - Tuple, - Union, - overload, -) -from collections import Counter - - -from .namespace import Namespace, ResolveKey -from .models import AppCommand -from .commands import Command, ContextMenu, Group -from .errors import ( - AppCommandError, - CommandAlreadyRegistered, - CommandNotFound, - CommandSignatureMismatch, - CommandLimitReached, - CommandSyncFailure, - MissingApplicationID, -) -from .translator import Translator, locale_str -from ..errors import ClientException, HTTPException -from ..enums import AppCommandType, InteractionType -from ..utils import MISSING, _get_as_snowflake, _is_submodule, _shorten -from .._types import ClientT - - -if TYPE_CHECKING: - from ..types.interactions import ApplicationCommandInteractionData, ApplicationCommandInteractionDataOption - from ..interactions import Interaction - from ..abc import Snowflake - from .commands import ContextMenuCallback, CommandCallback, P, T - - ErrorFunc = Callable[ - [Interaction, AppCommandError], - Coroutine[Any, Any, Any], - ] - -__all__ = ('CommandTree',) - -_log = logging.getLogger(__name__) - - -def _retrieve_guild_ids( - command: Any, guild: Optional[Snowflake] = MISSING, guilds: Sequence[Snowflake] = MISSING -) -> Optional[Set[int]]: - if guild is not MISSING and guilds is not MISSING: - raise TypeError('cannot mix guild and guilds keyword arguments') - - # guilds=[] or guilds=[...] - if guild is MISSING: - # If no arguments are given then it should default to the ones - # given to the guilds(...) decorator or None for global. - if guilds is MISSING: - return getattr(command, '_guild_ids', None) - - # guilds=[] is the same as global - if len(guilds) == 0: - return None - - return {g.id for g in guilds} - - # At this point it should be... - # guild=None or guild=Object - if guild is None: - return None - return {guild.id} - - -class CommandTree(Generic[ClientT]): - """Represents a container that holds application command information. - - Parameters - ----------- - client: :class:`~discord.Client` - The client instance to get application command information from. - fallback_to_global: :class:`bool` - If a guild-specific command is not found when invoked, then try falling back into - a global command in the tree. For example, if the tree locally has a ``/ping`` command - under the global namespace but the guild has a guild-specific ``/ping``, instead of failing - to find the guild-specific ``/ping`` command it will fall back to the global ``/ping`` command. - This has the potential to raise more :exc:`~discord.app_commands.CommandSignatureMismatch` errors - than usual. Defaults to ``True``. 
- """ - - def __init__(self, client: ClientT, *, fallback_to_global: bool = True): - self.client: ClientT = client - self._http = client.http - self._state = client._connection - - if self._state._command_tree is not None: - raise ClientException('This client already has an associated command tree.') - - self._state._command_tree = self - self.fallback_to_global: bool = fallback_to_global - self._guild_commands: Dict[int, Dict[str, Union[Command, Group]]] = {} - self._global_commands: Dict[str, Union[Command, Group]] = {} - # (name, guild_id, command_type): Command - # The above two mappings can use this structure too but we need fast retrieval - # by name and guild_id in the above case while here it isn't as important since - # it's uncommon and N=5 anyway. - self._context_menus: Dict[Tuple[str, Optional[int], int], ContextMenu] = {} - - async def fetch_command(self, command_id: int, /, *, guild: Optional[Snowflake] = None) -> AppCommand: - """|coro| - - Fetches an application command from the application. - - Parameters - ----------- - command_id: :class:`int` - The ID of the command to fetch. - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to fetch the command from. If not passed then the global command - is fetched instead. - - Raises - ------- - HTTPException - Fetching the command failed. - MissingApplicationID - The application ID could not be found. - NotFound - The application command was not found. - This could also be because the command is a guild command - and the guild was not specified and vice versa. - - Returns - -------- - :class:`~discord.app_commands.AppCommand` - The application command. - """ - if self.client.application_id is None: - raise MissingApplicationID - - if guild is None: - command = await self._http.get_global_command(self.client.application_id, command_id) - else: - command = await self._http.get_guild_command(self.client.application_id, guild.id, command_id) - - return AppCommand(data=command, state=self._state) - - async def fetch_commands(self, *, guild: Optional[Snowflake] = None) -> List[AppCommand]: - """|coro| - - Fetches the application's current commands. - - If no guild is passed then global commands are fetched, otherwise - the guild's commands are fetched instead. - - .. note:: - - This includes context menu commands. - - Parameters - ----------- - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to fetch the commands from. If not passed then global commands - are fetched instead. - - Raises - ------- - HTTPException - Fetching the commands failed. - MissingApplicationID - The application ID could not be found. - - Returns - -------- - List[:class:`~discord.app_commands.AppCommand`] - The application's commands. - """ - if self.client.application_id is None: - raise MissingApplicationID - - if guild is None: - commands = await self._http.get_global_commands(self.client.application_id) - else: - commands = await self._http.get_guild_commands(self.client.application_id, guild.id) - - return [AppCommand(data=data, state=self._state) for data in commands] - - def copy_global_to(self, *, guild: Snowflake) -> None: - """Copies all global commands to the specified guild. - - This method is mainly available for development purposes, as it allows you - to copy your global commands over to a testing guild easily. - - Note that this method will *override* pre-existing guild commands that would conflict. - - Parameters - ----------- - guild: :class:`~discord.abc.Snowflake` - The guild to copy the commands to. 
- - Raises - -------- - CommandLimitReached - The maximum number of commands was reached for that guild. - This is currently 100 for slash commands and 5 for context menu commands. - """ - - try: - mapping = self._guild_commands[guild.id].copy() - except KeyError: - mapping = {} - - mapping.update(self._global_commands) - if len(mapping) > 100: - raise CommandLimitReached(guild_id=guild.id, limit=100) - - ctx_menu: Dict[Tuple[str, Optional[int], int], ContextMenu] = { - (name, guild.id, cmd_type): cmd - for ((name, g, cmd_type), cmd) in self._context_menus.items() - if g is None or g == guild.id - } - - counter = Counter(cmd_type for _, _, cmd_type in ctx_menu) - for cmd_type, count in counter.items(): - if count > 5: - as_enum = AppCommandType(cmd_type) - raise CommandLimitReached(guild_id=guild.id, limit=5, type=as_enum) - - self._context_menus.update(ctx_menu) - self._guild_commands[guild.id] = mapping - - def add_command( - self, - command: Union[Command[Any, ..., Any], ContextMenu, Group], - /, - *, - guild: Optional[Snowflake] = MISSING, - guilds: Sequence[Snowflake] = MISSING, - override: bool = False, - ) -> None: - """Adds an application command to the tree. - - This only adds the command locally -- in order to sync the commands - and enable them in the client, :meth:`sync` must be called. - - The root parent of the command is added regardless of the type passed. - - Parameters - ----------- - command: Union[:class:`Command`, :class:`Group`] - The application command or group to add. - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to add the command to. If not given or ``None`` then it - becomes a global command instead. - guilds: List[:class:`~discord.abc.Snowflake`] - The list of guilds to add the command to. This cannot be mixed - with the ``guild`` parameter. If no guilds are given at all - then it becomes a global command instead. - override: :class:`bool` - Whether to override a command with the same name. If ``False`` - an exception is raised. Default is ``False``. - - Raises - -------- - ~discord.app_commands.CommandAlreadyRegistered - The command was already registered and no override was specified. - TypeError - The application command passed is not a valid application command. - Or, ``guild`` and ``guilds`` were both given. - CommandLimitReached - The maximum number of commands was reached globally or for that guild. - This is currently 100 for slash commands and 5 for context menu commands. 
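    Putting it together, a command only exists locally until :meth:`sync` runs (the guild ID below is a placeholder):

    .. code-block:: python3

        @app_commands.command(description='Replies with pong')
        async def ping(interaction: discord.Interaction):
            await interaction.response.send_message('pong')

        guild = discord.Object(id=0)  # placeholder guild ID
        tree.add_command(ping, guild=guild)

        # Nothing is visible in the client until the tree is synced,
        # typically from Client.setup_hook:
        await tree.sync(guild=guild)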
- """ - - guild_ids = _retrieve_guild_ids(command, guild, guilds) - if isinstance(command, ContextMenu): - type = command.type.value - name = command.name - - def _context_menu_add_helper( - guild_id: Optional[int], - data: Dict[Tuple[str, Optional[int], int], ContextMenu], - name: str = name, - type: int = type, - ) -> None: - key = (name, guild_id, type) - found = key in self._context_menus - if found and not override: - raise CommandAlreadyRegistered(name, guild_id) - - # If the key is found and overridden then it shouldn't count as an extra addition - # read as `0 if override and found else 1` if confusing - to_add = not (override and found) - total = sum(1 for _, g, t in self._context_menus if g == guild_id and t == type) - if total + to_add > 5: - raise CommandLimitReached(guild_id=guild_id, limit=5, type=AppCommandType(type)) - data[key] = command - - if guild_ids is None: - _context_menu_add_helper(None, self._context_menus) - else: - current: Dict[Tuple[str, Optional[int], int], ContextMenu] = {} - for guild_id in guild_ids: - _context_menu_add_helper(guild_id, current) - - # Update at the end in order to make sure the update is atomic. - # An error during addition could end up making the context menu mapping - # have a partial state - self._context_menus.update(current) - return - elif not isinstance(command, (Command, Group)): - raise TypeError(f'Expected an application command, received {command.__class__.__name__} instead') - - # todo: validate application command groups having children (required) - - root = command.root_parent or command - name = root.name - if guild_ids is not None: - # Validate that the command can be added first, before actually - # adding it into the mapping. This ensures atomicity. - for guild_id in guild_ids: - commands = self._guild_commands.get(guild_id, {}) - found = name in commands - if found and not override: - raise CommandAlreadyRegistered(name, guild_id) - - to_add = not (override and found) - if len(commands) + to_add > 100: - raise CommandLimitReached(guild_id=guild_id, limit=100) - - # Actually add the command now that it has been verified to be okay. - for guild_id in guild_ids: - commands = self._guild_commands.setdefault(guild_id, {}) - commands[name] = root - else: - found = name in self._global_commands - if found and not override: - raise CommandAlreadyRegistered(name, None) - - to_add = not (override and found) - if len(self._global_commands) + to_add > 100: - raise CommandLimitReached(guild_id=None, limit=100) - self._global_commands[name] = root - - @overload - def remove_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.message, AppCommandType.user], - ) -> Optional[ContextMenu]: - ... - - @overload - def remove_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.chat_input] = ..., - ) -> Optional[Union[Command[Any, ..., Any], Group]]: - ... - - @overload - def remove_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = ..., - type: AppCommandType, - ) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]: - ... - - def remove_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = None, - type: AppCommandType = AppCommandType.chat_input, - ) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]: - """Removes an application command from the tree. 
- - This only removes the command locally -- in order to sync the commands - and remove them in the client, :meth:`sync` must be called. - - Parameters - ----------- - command: :class:`str` - The name of the root command to remove. - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to remove the command from. If not given or ``None`` then it - removes a global command instead. - type: :class:`~discord.AppCommandType` - The type of command to remove. Defaults to :attr:`~discord.AppCommandType.chat_input`, - i.e. slash commands. - - Returns - --------- - Optional[Union[:class:`Command`, :class:`ContextMenu`, :class:`Group`]] - The application command that got removed. - If nothing was removed then ``None`` is returned instead. - """ - - if type is AppCommandType.chat_input: - if guild is None: - return self._global_commands.pop(command, None) - else: - try: - commands = self._guild_commands[guild.id] - except KeyError: - return None - else: - return commands.pop(command, None) - elif type in (AppCommandType.user, AppCommandType.message): - guild_id = None if guild is None else guild.id - key = (command, guild_id, type.value) - return self._context_menus.pop(key, None) - - def clear_commands(self, *, guild: Optional[Snowflake], type: Optional[AppCommandType] = None) -> None: - """Clears all application commands from the tree. - - This only removes the commands locally -- in order to sync the commands - and remove them in the client, :meth:`sync` must be called. - - Parameters - ----------- - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to remove the commands from. If ``None`` then it - removes all global commands instead. - type: :class:`~discord.AppCommandType` - The type of command to clear. If not given or ``None`` then it removes all commands - regardless of the type. - """ - - if type is None or type is AppCommandType.chat_input: - if guild is None: - self._global_commands.clear() - else: - try: - commands = self._guild_commands[guild.id] - except KeyError: - pass - else: - commands.clear() - - guild_id = None if guild is None else guild.id - if type is None: - self._context_menus = { - (name, _guild_id, value): cmd - for (name, _guild_id, value), cmd in self._context_menus.items() - if _guild_id != guild_id - } - elif type in (AppCommandType.user, AppCommandType.message): - self._context_menus = { - (name, _guild_id, value): cmd - for (name, _guild_id, value), cmd in self._context_menus.items() - if _guild_id != guild_id or value != type.value - } - - @overload - def get_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.message, AppCommandType.user], - ) -> Optional[ContextMenu]: - ... - - @overload - def get_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.chat_input] = ..., - ) -> Optional[Union[Command[Any, ..., Any], Group]]: - ... - - @overload - def get_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = ..., - type: AppCommandType, - ) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]: - ... - - def get_command( - self, - command: str, - /, - *, - guild: Optional[Snowflake] = None, - type: AppCommandType = AppCommandType.chat_input, - ) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]: - """Gets an application command from the tree. - - Parameters - ----------- - command: :class:`str` - The name of the root command to get. 
- guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to get the command from. If not given or ``None`` then it - gets a global command instead. - type: :class:`~discord.AppCommandType` - The type of command to get. Defaults to :attr:`~discord.AppCommandType.chat_input`, - i.e. slash commands. - - Returns - --------- - Optional[Union[:class:`Command`, :class:`ContextMenu`, :class:`Group`]] - The application command that was found. - If nothing was found then ``None`` is returned instead. - """ - - if type is AppCommandType.chat_input: - if guild is None: - return self._global_commands.get(command) - else: - try: - commands = self._guild_commands[guild.id] - except KeyError: - return None - else: - return commands.get(command) - elif type in (AppCommandType.user, AppCommandType.message): - guild_id = None if guild is None else guild.id - key = (command, guild_id, type.value) - return self._context_menus.get(key) - - @overload - def get_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.message, AppCommandType.user], - ) -> List[ContextMenu]: - ... - - @overload - def get_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.chat_input], - ) -> List[Union[Command[Any, ..., Any], Group]]: - ... - - @overload - def get_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: AppCommandType, - ) -> Union[List[Union[Command[Any, ..., Any], Group]], List[ContextMenu]]: - ... - - @overload - def get_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: Optional[AppCommandType] = ..., - ) -> List[Union[Command[Any, ..., Any], Group, ContextMenu]]: - ... - - def get_commands( - self, - *, - guild: Optional[Snowflake] = None, - type: Optional[AppCommandType] = None, - ) -> Union[ - List[ContextMenu], - List[Union[Command[Any, ..., Any], Group]], - List[Union[Command[Any, ..., Any], Group, ContextMenu]], - ]: - """Gets all application commands from the tree. - - Parameters - ----------- - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to get the commands from, not including global commands. - If not given or ``None`` then only global commands are returned. - type: Optional[:class:`~discord.AppCommandType`] - The type of commands to get. When not given or ``None``, then all - command types are returned. - - Returns - --------- - List[Union[:class:`ContextMenu`, :class:`Command`, :class:`Group`]] - The application commands from the tree. - """ - if type is None: - return self._get_all_commands(guild=guild) - - if type is AppCommandType.chat_input: - if guild is None: - return list(self._global_commands.values()) - else: - try: - commands = self._guild_commands[guild.id] - except KeyError: - return [] - else: - return list(commands.values()) - else: - guild_id = None if guild is None else guild.id - value = type.value - return [command for ((_, g, t), command) in self._context_menus.items() if g == guild_id and t == value] - - @overload - def walk_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.message, AppCommandType.user], - ) -> Generator[ContextMenu, None, None]: - ... - - @overload - def walk_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: Literal[AppCommandType.chat_input] = ..., - ) -> Generator[Union[Command[Any, ..., Any], Group], None, None]: - ... 
- - @overload - def walk_commands( - self, - *, - guild: Optional[Snowflake] = ..., - type: AppCommandType, - ) -> Union[Generator[Union[Command[Any, ..., Any], Group], None, None], Generator[ContextMenu, None, None]]: - ... - - def walk_commands( - self, - *, - guild: Optional[Snowflake] = None, - type: AppCommandType = AppCommandType.chat_input, - ) -> Union[Generator[Union[Command[Any, ..., Any], Group], None, None], Generator[ContextMenu, None, None]]: - """An iterator that recursively walks through all application commands and child commands from the tree. - - Parameters - ----------- - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to iterate the commands from, not including global commands. - If not given or ``None`` then only global commands are iterated. - type: :class:`~discord.AppCommandType` - The type of commands to iterate over. Defaults to :attr:`~discord.AppCommandType.chat_input`, - i.e. slash commands. - - Yields - --------- - Union[:class:`ContextMenu`, :class:`Command`, :class:`Group`] - The application commands from the tree. - """ - - if type is AppCommandType.chat_input: - if guild is None: - for cmd in self._global_commands.values(): - yield cmd - if isinstance(cmd, Group): - yield from cmd.walk_commands() - else: - try: - commands = self._guild_commands[guild.id] - except KeyError: - return - else: - for cmd in commands.values(): - yield cmd - if isinstance(cmd, Group): - yield from cmd.walk_commands() - else: - guild_id = None if guild is None else guild.id - value = type.value - for ((_, g, t), command) in self._context_menus.items(): - if g == guild_id and t == value: - yield command - - def _get_all_commands( - self, *, guild: Optional[Snowflake] = None - ) -> List[Union[Command[Any, ..., Any], Group, ContextMenu]]: - if guild is None: - base: List[Union[Command[Any, ..., Any], Group, ContextMenu]] = list(self._global_commands.values()) - base.extend(cmd for ((_, g, _), cmd) in self._context_menus.items() if g is None) - return base - else: - try: - commands = self._guild_commands[guild.id] - except KeyError: - guild_id = guild.id - return [cmd for ((_, g, _), cmd) in self._context_menus.items() if g == guild_id] - else: - base: List[Union[Command[Any, ..., Any], Group, ContextMenu]] = list(commands.values()) - guild_id = guild.id - base.extend(cmd for ((_, g, _), cmd) in self._context_menus.items() if g == guild_id) - return base - - def _remove_with_module(self, name: str) -> None: - remove: List[Any] = [] - for key, cmd in self._context_menus.items(): - if cmd.module is not None and _is_submodule(name, cmd.module): - remove.append(key) - - for key in remove: - del self._context_menus[key] - - remove = [] - for key, cmd in self._global_commands.items(): - if cmd.module is not None and _is_submodule(name, cmd.module): - remove.append(key) - - for key in remove: - del self._global_commands[key] - - for mapping in self._guild_commands.values(): - remove = [] - for key, cmd in mapping.items(): - if cmd.module is not None and _is_submodule(name, cmd.module): - remove.append(key) - - for key in remove: - del mapping[key] - - async def on_error(self, interaction: Interaction[ClientT], error: AppCommandError, /) -> None: - """|coro| - - A callback that is called when any command raises an :exc:`AppCommandError`. - - The default implementation logs the exception using the library logger - if the command does not have any error handlers attached to it. - - To get the command that failed, :attr:`discord.Interaction.command` should - be used. 
- - Parameters - ----------- - interaction: :class:`~discord.Interaction` - The interaction that is being handled. - error: :exc:`AppCommandError` - The exception that was raised. - """ - - command = interaction.command - if command is not None: - if command._has_any_error_handlers(): - return - - _log.error('Ignoring exception in command %r', command.name, exc_info=error) - else: - _log.error('Ignoring exception in command tree', exc_info=error) - - def error(self, coro: ErrorFunc) -> ErrorFunc: - """A decorator that registers a coroutine as a local error handler. - - This must match the signature of the :meth:`on_error` callback. - - The error passed will be derived from :exc:`AppCommandError`. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the local error handler. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine or does - not match the signature. - """ - - if not inspect.iscoroutinefunction(coro): - raise TypeError('The error handler must be a coroutine.') - - params = inspect.signature(coro).parameters - if len(params) != 2: - raise TypeError('error handler must have 2 parameters') - - self.on_error = coro - return coro - - def command( - self, - *, - name: Union[str, locale_str] = MISSING, - description: Union[str, locale_str] = MISSING, - nsfw: bool = False, - guild: Optional[Snowflake] = MISSING, - guilds: Sequence[Snowflake] = MISSING, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, - ) -> Callable[[CommandCallback[Group, P, T]], Command[Group, P, T]]: - """A decorator that creates an application command from a regular function directly under this tree. - - Parameters - ------------ - name: Union[:class:`str`, :class:`locale_str`] - The name of the application command. If not given, it defaults to a lower-case - version of the callback name. - description: Union[:class:`str`, :class:`locale_str`] - The description of the application command. This shows up in the UI to describe - the application command. If not given, it defaults to the first line of the docstring - of the callback shortened to 100 characters. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to add the command to. If not given or ``None`` then it - becomes a global command instead. - guilds: List[:class:`~discord.abc.Snowflake`] - The list of guilds to add the command to. This cannot be mixed - with the ``guild`` parameter. If no guilds are given at all - then it becomes a global command instead. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. 
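# --- Illustrative usage sketch (not part of the deleted file) ---
# A hedged example of the `CommandTree.command` decorator and the `error`
# decorator described above; `tree` is an assumed, already-constructed
# app_commands.CommandTree instance.
import discord
from discord import app_commands

@tree.command(name='echo', description='Repeat your message back to you')
async def echo(interaction: discord.Interaction, text: str) -> None:
    await interaction.response.send_message(text)

@tree.error
async def on_tree_error(interaction: discord.Interaction, error: app_commands.AppCommandError) -> None:
    # Matches the two-parameter signature that the error() decorator enforces.
    await interaction.response.send_message('Something went wrong.', ephemeral=True)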
- """ - - def decorator(func: CommandCallback[Group, P, T]) -> Command[Group, P, T]: - if not inspect.iscoroutinefunction(func): - raise TypeError('command function must be a coroutine function') - - if description is MISSING: - if func.__doc__ is None: - desc = '…' - else: - desc = _shorten(func.__doc__) - else: - desc = description - - command = Command( - name=name if name is not MISSING else func.__name__, - description=desc, - callback=func, - nsfw=nsfw, - parent=None, - auto_locale_strings=auto_locale_strings, - extras=extras, - ) - self.add_command(command, guild=guild, guilds=guilds) - return command - - return decorator - - def context_menu( - self, - *, - name: Union[str, locale_str] = MISSING, - nsfw: bool = False, - guild: Optional[Snowflake] = MISSING, - guilds: Sequence[Snowflake] = MISSING, - auto_locale_strings: bool = True, - extras: Dict[Any, Any] = MISSING, - ) -> Callable[[ContextMenuCallback], ContextMenu]: - """A decorator that creates an application command context menu from a regular function directly under this tree. - - This function must have a signature of :class:`~discord.Interaction` as its first parameter - and taking either a :class:`~discord.Member`, :class:`~discord.User`, or :class:`~discord.Message`, - or a :obj:`typing.Union` of ``Member`` and ``User`` as its second parameter. - - Examples - --------- - - .. code-block:: python3 - - @app_commands.context_menu() - async def react(interaction: discord.Interaction, message: discord.Message): - await interaction.response.send_message('Very cool message!', ephemeral=True) - - @app_commands.context_menu() - async def ban(interaction: discord.Interaction, user: discord.Member): - await interaction.response.send_message(f'Should I actually ban {user}...', ephemeral=True) - - Parameters - ------------ - name: Union[:class:`str`, :class:`locale_str`] - The name of the context menu command. If not given, it defaults to a title-case - version of the callback name. Note that unlike regular slash commands this can - have spaces and upper case characters in the name. - nsfw: :class:`bool` - Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``. - - Due to a Discord limitation, this does not work on subcommands. - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to add the command to. If not given or ``None`` then it - becomes a global command instead. - guilds: List[:class:`~discord.abc.Snowflake`] - The list of guilds to add the command to. This cannot be mixed - with the ``guild`` parameter. If no guilds are given at all - then it becomes a global command instead. - auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`locale_str` rather than :class:`str`. This could - avoid some repetition and be more ergonomic for certain defaults such - as default command names, command descriptions, and parameter names. - Defaults to ``True``. - extras: :class:`dict` - A dictionary that can be used to store extraneous data. - The library will not touch any values or keys within this dictionary. 
- """ - - def decorator(func: ContextMenuCallback) -> ContextMenu: - if not inspect.iscoroutinefunction(func): - raise TypeError('context menu function must be a coroutine function') - - actual_name = func.__name__.title() if name is MISSING else name - context_menu = ContextMenu( - name=actual_name, - nsfw=nsfw, - callback=func, - auto_locale_strings=auto_locale_strings, - extras=extras, - ) - self.add_command(context_menu, guild=guild, guilds=guilds) - return context_menu - - return decorator - - @property - def translator(self) -> Optional[Translator]: - """Optional[:class:`Translator`]: The translator, if any, responsible for handling translation of commands. - - To change the translator, use :meth:`set_translator`. - """ - return self._state._translator - - async def set_translator(self, translator: Optional[Translator]) -> None: - """|coro| - - Sets the translator to use for translating commands. - - If a translator was previously set, it will be unloaded using its - :meth:`Translator.unload` method. - - When a translator is set, it will be loaded using its :meth:`Translator.load` method. - - Parameters - ------------ - translator: Optional[:class:`Translator`] - The translator to use. If ``None`` then the translator is just removed and unloaded. - - Raises - ------- - TypeError - The translator was not ``None`` or a :class:`Translator` instance. - """ - - if translator is not None and not isinstance(translator, Translator): - raise TypeError(f'expected None or Translator instance, received {translator.__class__.__name__} instead') - - old_translator = self._state._translator - if old_translator is not None: - await old_translator.unload() - - if translator is None: - self._state._translator = None - else: - await translator.load() - self._state._translator = translator - - async def sync(self, *, guild: Optional[Snowflake] = None) -> List[AppCommand]: - """|coro| - - Syncs the application commands to Discord. - - This also runs the translator to get the translated strings necessary for - feeding back into Discord. - - This must be called for the application commands to show up. - - Parameters - ----------- - guild: Optional[:class:`~discord.abc.Snowflake`] - The guild to sync the commands to. If ``None`` then it - syncs all global commands instead. - - Raises - ------- - HTTPException - Syncing the commands failed. - CommandSyncFailure - Syncing the commands failed due to a user related error, typically because - the command has invalid data. This is equivalent to an HTTP status code of - 400. - Forbidden - The client does not have the ``applications.commands`` scope in the guild. - MissingApplicationID - The client does not have an application ID. - TranslationError - An error occurred while translating the commands. - - Returns - -------- - List[:class:`AppCommand`] - The application's commands that got synced. 
- """ - - if self.client.application_id is None: - raise MissingApplicationID - - commands = self._get_all_commands(guild=guild) - - translator = self.translator - if translator: - payload = [await command.get_translated_payload(translator) for command in commands] - else: - payload = [command.to_dict() for command in commands] - - try: - if guild is None: - data = await self._http.bulk_upsert_global_commands(self.client.application_id, payload=payload) - else: - data = await self._http.bulk_upsert_guild_commands(self.client.application_id, guild.id, payload=payload) - except HTTPException as e: - if e.status == 400 and e.code == 50035: - raise CommandSyncFailure(e, commands) from None - raise - - return [AppCommand(data=d, state=self._state) for d in data] - - async def _dispatch_error(self, interaction: Interaction[ClientT], error: AppCommandError, /) -> None: - command = interaction.command - interaction.command_failed = True - try: - if isinstance(command, Command): - await command._invoke_error_handlers(interaction, error) - finally: - await self.on_error(interaction, error) - - def _from_interaction(self, interaction: Interaction[ClientT]) -> None: - async def wrapper(): - try: - await self._call(interaction) - except AppCommandError as e: - await self._dispatch_error(interaction, e) - - self.client.loop.create_task(wrapper(), name='CommandTree-invoker') - - def _get_context_menu(self, data: ApplicationCommandInteractionData) -> Optional[ContextMenu]: - name = data['name'] - guild_id = _get_as_snowflake(data, 'guild_id') - t = data.get('type', 1) - cmd = self._context_menus.get((name, guild_id, t)) - if cmd is None and self.fallback_to_global: - return self._context_menus.get((name, None, t)) - return cmd - - def _get_app_command_options( - self, data: ApplicationCommandInteractionData - ) -> Tuple[Command[Any, ..., Any], List[ApplicationCommandInteractionDataOption]]: - parents: List[str] = [] - name = data['name'] - - command_guild_id = _get_as_snowflake(data, 'guild_id') - if command_guild_id: - try: - guild_commands = self._guild_commands[command_guild_id] - except KeyError: - command = None if not self.fallback_to_global else self._global_commands.get(name) - else: - command = guild_commands.get(name) - if command is None and self.fallback_to_global: - command = self._global_commands.get(name) - else: - command = self._global_commands.get(name) - - # If it's not found at this point then it's not gonna be found at any point - if command is None: - raise CommandNotFound(name, parents) - - # This could be done recursively but it'd be a bother due to the state needed - # to be tracked above like the parents, the actual command type, and the - # resulting options we care about - searching = True - options: List[ApplicationCommandInteractionDataOption] = data.get('options', []) - while searching: - for option in options: - # Find subcommands - if option.get('type', 0) in (1, 2): - parents.append(name) - name = option['name'] - command = command._get_internal_command(name) - if command is None: - raise CommandNotFound(name, parents) - options = option.get('options', []) - break - else: - searching = False - break - else: - break - - if isinstance(command, Group): - # Right now, groups can't be invoked. This is a Discord limitation in how they - # do slash commands. So if we're here and we have a Group rather than a Command instance - # then something in the code is out of date from the data that Discord has. 
- raise CommandSignatureMismatch(command) - - return (command, options) - - async def _call_context_menu( - self, interaction: Interaction[ClientT], data: ApplicationCommandInteractionData, type: int - ) -> None: - name = data['name'] - guild_id = _get_as_snowflake(data, 'guild_id') - ctx_menu = self._context_menus.get((name, guild_id, type)) - if ctx_menu is None and self.fallback_to_global: - ctx_menu = self._context_menus.get((name, None, type)) - - # Pre-fill the cached slot to prevent re-computation - interaction._cs_command = ctx_menu - - if ctx_menu is None: - raise CommandNotFound(name, [], AppCommandType(type)) - - resolved = Namespace._get_resolved_items(interaction, data.get('resolved', {})) - - # This is annotated as str | int but realistically this will always be str - target_id: Optional[Union[str, int]] = data.get('target_id') - # Right now, the only types are message and user - # Therefore, there's no conflict with snowflakes - - # This will always work at runtime - key = ResolveKey.any_with(target_id) # type: ignore - value = resolved.get(key) - if ctx_menu.type.value != type: - raise CommandSignatureMismatch(ctx_menu) - - if value is None: - raise AppCommandError('This should not happen if Discord sent well-formed data.') - - # I assume I don't have to type check here. - try: - await ctx_menu._invoke(interaction, value) - except AppCommandError as e: - if ctx_menu.on_error is not None: - await ctx_menu.on_error(interaction, e) - await self.on_error(interaction, e) - else: - self.client.dispatch('app_command_completion', interaction, ctx_menu) - - async def interaction_check(self, interaction: Interaction[ClientT], /) -> bool: - """|coro| - - A global check to determine if an :class:`~discord.Interaction` should - be processed by the tree. - - The default implementation returns True (all interactions are processed), - but can be overridden if custom behaviour is desired. - """ - return True - - async def _call(self, interaction: Interaction[ClientT]) -> None: - if not await self.interaction_check(interaction): - interaction.command_failed = True - return - - data: ApplicationCommandInteractionData = interaction.data # type: ignore - type = data.get('type', 1) - if type != 1: - # Context menu command... - await self._call_context_menu(interaction, data, type) - return - - command, options = self._get_app_command_options(data) - - # Pre-fill the cached slot to prevent re-computation - interaction._cs_command = command - - # At this point options refers to the arguments of the command - # and command refers to the class type we care about - namespace = Namespace(interaction, data.get('resolved', {}), options) - - # Same pre-fill as above - interaction._cs_namespace = namespace - - # Auto complete handles the namespace differently... so at this point this is where we decide where that is. - if interaction.type is InteractionType.autocomplete: - focused = next((opt['name'] for opt in options if opt.get('focused')), None) - if focused is None: - raise AppCommandError('This should not happen, but there is no focused element. This is a Discord bug.') - - try: - await command._invoke_autocomplete(interaction, focused, namespace) - except Exception: - # Suppress exception since it can't be handled anyway. 
- pass - - return - - try: - await command._invoke_with_namespace(interaction, namespace) - except AppCommandError as e: - interaction.command_failed = True - await command._invoke_error_handlers(interaction, e) - await self.on_error(interaction, e) - else: - if not interaction.command_failed: - self.client.dispatch('app_command_completion', interaction, command) diff --git a/.venv/Lib/site-packages/discord/appinfo.py b/.venv/Lib/site-packages/discord/appinfo.py deleted file mode 100644 index 129e543..0000000 --- a/.venv/Lib/site-packages/discord/appinfo.py +++ /dev/null @@ -1,362 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import List, TYPE_CHECKING, Optional - -from . import utils -from .asset import Asset -from .flags import ApplicationFlags -from .permissions import Permissions - -if TYPE_CHECKING: - from .guild import Guild - from .types.appinfo import ( - AppInfo as AppInfoPayload, - PartialAppInfo as PartialAppInfoPayload, - Team as TeamPayload, - InstallParams as InstallParamsPayload, - ) - from .user import User - from .state import ConnectionState - -__all__ = ( - 'AppInfo', - 'PartialAppInfo', - 'AppInstallParams', -) - - -class AppInfo: - """Represents the application info for the bot provided by Discord. - - - Attributes - ------------- - id: :class:`int` - The application ID. - name: :class:`str` - The application name. - owner: :class:`User` - The application owner. - team: Optional[:class:`Team`] - The application's team. - - .. versionadded:: 1.3 - - description: :class:`str` - The application description. - bot_public: :class:`bool` - Whether the bot can be invited by anyone or if it is locked - to the application owner. - bot_require_code_grant: :class:`bool` - Whether the bot requires the completion of the full oauth2 code - grant flow to join. - rpc_origins: Optional[List[:class:`str`]] - A list of RPC origin URLs, if RPC is enabled. - - verify_key: :class:`str` - The hex encoded key for verification in interactions and the - GameSDK's :ddocs:`GetTicket `. - - .. versionadded:: 1.3 - - guild_id: Optional[:class:`int`] - If this application is a game sold on Discord, - this field will be the guild to which it has been linked to. - - .. versionadded:: 1.3 - - primary_sku_id: Optional[:class:`int`] - If this application is a game sold on Discord, - this field will be the id of the "Game SKU" that is created, - if it exists. - - .. 
versionadded:: 1.3 - - slug: Optional[:class:`str`] - If this application is a game sold on Discord, - this field will be the URL slug that links to the store page. - - .. versionadded:: 1.3 - - terms_of_service_url: Optional[:class:`str`] - The application's terms of service URL, if set. - - .. versionadded:: 2.0 - - privacy_policy_url: Optional[:class:`str`] - The application's privacy policy URL, if set. - - .. versionadded:: 2.0 - - tags: List[:class:`str`] - The list of tags describing the functionality of the application. - - .. versionadded:: 2.0 - - custom_install_url: List[:class:`str`] - The custom authorization URL for the application, if enabled. - - .. versionadded:: 2.0 - - install_params: Optional[:class:`AppInstallParams`] - The settings for custom authorization URL of application, if enabled. - - .. versionadded:: 2.0 - role_connections_verification_url: Optional[:class:`str`] - The application's connection verification URL which will render the application as - a verification method in the guild's role verification configuration. - - .. versionadded:: 2.2 - """ - - __slots__ = ( - '_state', - 'description', - 'id', - 'name', - 'rpc_origins', - 'bot_public', - 'bot_require_code_grant', - 'owner', - '_icon', - 'verify_key', - 'team', - 'guild_id', - 'primary_sku_id', - 'slug', - '_cover_image', - '_flags', - 'terms_of_service_url', - 'privacy_policy_url', - 'tags', - 'custom_install_url', - 'install_params', - 'role_connections_verification_url', - ) - - def __init__(self, state: ConnectionState, data: AppInfoPayload): - from .team import Team - - self._state: ConnectionState = state - self.id: int = int(data['id']) - self.name: str = data['name'] - self.description: str = data['description'] - self._icon: Optional[str] = data['icon'] - self.rpc_origins: Optional[List[str]] = data.get('rpc_origins') - self.bot_public: bool = data['bot_public'] - self.bot_require_code_grant: bool = data['bot_require_code_grant'] - self.owner: User = state.create_user(data['owner']) - - team: Optional[TeamPayload] = data.get('team') - self.team: Optional[Team] = Team(state, team) if team else None - - self.verify_key: str = data['verify_key'] - - self.guild_id: Optional[int] = utils._get_as_snowflake(data, 'guild_id') - - self.primary_sku_id: Optional[int] = utils._get_as_snowflake(data, 'primary_sku_id') - self.slug: Optional[str] = data.get('slug') - self._flags: int = data.get('flags', 0) - self._cover_image: Optional[str] = data.get('cover_image') - self.terms_of_service_url: Optional[str] = data.get('terms_of_service_url') - self.privacy_policy_url: Optional[str] = data.get('privacy_policy_url') - self.tags: List[str] = data.get('tags', []) - self.custom_install_url: Optional[str] = data.get('custom_install_url') - self.role_connections_verification_url: Optional[str] = data.get('role_connections_verification_url') - - params = data.get('install_params') - self.install_params: Optional[AppInstallParams] = AppInstallParams(params) if params else None - - def __repr__(self) -> str: - return ( - f'<{self.__class__.__name__} id={self.id} name={self.name!r} ' - f'description={self.description!r} public={self.bot_public} ' - f'owner={self.owner!r}>' - ) - - @property - def icon(self) -> Optional[Asset]: - """Optional[:class:`.Asset`]: Retrieves the application's icon asset, if any.""" - if self._icon is None: - return None - return Asset._from_icon(self._state, self.id, self._icon, path='app') - - @property - def cover_image(self) -> Optional[Asset]: - """Optional[:class:`.Asset`]: Retrieves 
the cover image on a store embed, if any. - - This is only available if the application is a game sold on Discord. - """ - if self._cover_image is None: - return None - return Asset._from_cover_image(self._state, self.id, self._cover_image) - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`Guild`]: If this application is a game sold on Discord, - this field will be the guild to which it has been linked - - .. versionadded:: 1.3 - """ - return self._state._get_guild(self.guild_id) - - @property - def flags(self) -> ApplicationFlags: - """:class:`ApplicationFlags`: The application's flags. - - .. versionadded:: 2.0 - """ - return ApplicationFlags._from_value(self._flags) - - -class PartialAppInfo: - """Represents a partial AppInfo given by :func:`~discord.abc.GuildChannel.create_invite` - - .. versionadded:: 2.0 - - Attributes - ------------- - id: :class:`int` - The application ID. - name: :class:`str` - The application name. - description: :class:`str` - The application description. - rpc_origins: Optional[List[:class:`str`]] - A list of RPC origin URLs, if RPC is enabled. - verify_key: :class:`str` - The hex encoded key for verification in interactions and the - GameSDK's :ddocs:`GetTicket `. - terms_of_service_url: Optional[:class:`str`] - The application's terms of service URL, if set. - privacy_policy_url: Optional[:class:`str`] - The application's privacy policy URL, if set. - approximate_guild_count: :class:`int` - The approximate count of the guilds the bot was added to. - - .. versionadded:: 2.3 - redirect_uris: List[:class:`str`] - A list of authentication redirect URIs. - - .. versionadded:: 2.3 - interactions_endpoint_url: Optional[:class:`str`] - The interactions endpoint url of the application to receive interactions over this endpoint rather than - over the gateway, if configured. - - .. versionadded:: 2.3 - role_connections_verification_url: Optional[:class:`str`] - The application's connection verification URL which will render the application as - a verification method in the guild's role verification configuration. - - .. 
versionadded:: 2.3 - """ - - __slots__ = ( - '_state', - 'id', - 'name', - 'description', - 'rpc_origins', - 'verify_key', - 'terms_of_service_url', - 'privacy_policy_url', - '_icon', - '_flags', - '_cover_image', - 'approximate_guild_count', - 'redirect_uris', - 'interactions_endpoint_url', - 'role_connections_verification_url', - ) - - def __init__(self, *, state: ConnectionState, data: PartialAppInfoPayload): - self._state: ConnectionState = state - self.id: int = int(data['id']) - self.name: str = data['name'] - self._icon: Optional[str] = data.get('icon') - self._flags: int = data.get('flags', 0) - self._cover_image: Optional[str] = data.get('cover_image') - self.description: str = data['description'] - self.rpc_origins: Optional[List[str]] = data.get('rpc_origins') - self.verify_key: str = data['verify_key'] - self.terms_of_service_url: Optional[str] = data.get('terms_of_service_url') - self.privacy_policy_url: Optional[str] = data.get('privacy_policy_url') - self.approximate_guild_count: int = data.get('approximate_guild_count', 0) - self.redirect_uris: List[str] = data.get('redirect_uris', []) - self.interactions_endpoint_url: Optional[str] = data.get('interactions_endpoint_url') - self.role_connections_verification_url: Optional[str] = data.get('role_connections_verification_url') - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} id={self.id} name={self.name!r} description={self.description!r}>' - - @property - def icon(self) -> Optional[Asset]: - """Optional[:class:`.Asset`]: Retrieves the application's icon asset, if any.""" - if self._icon is None: - return None - return Asset._from_icon(self._state, self.id, self._icon, path='app') - - @property - def cover_image(self) -> Optional[Asset]: - """Optional[:class:`.Asset`]: Retrieves the cover image of the application's default rich presence. - - This is only available if the application is a game sold on Discord. - - .. versionadded:: 2.3 - """ - if self._cover_image is None: - return None - return Asset._from_cover_image(self._state, self.id, self._cover_image) - - @property - def flags(self) -> ApplicationFlags: - """:class:`ApplicationFlags`: The application's flags. - - .. versionadded:: 2.0 - """ - return ApplicationFlags._from_value(self._flags) - - -class AppInstallParams: - """Represents the settings for custom authorization URL of an application. - - .. versionadded:: 2.0 - - Attributes - ---------- - scopes: List[:class:`str`] - The list of :ddocs:`OAuth2 scopes ` - to add the application to a guild with. - permissions: :class:`Permissions` - The permissions to give to application in the guild. 
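# --- Illustrative usage sketch (not part of the deleted file) ---
# A hedged example of reading the AppInfo / AppInstallParams attributes
# documented above. The `client` parameter is an assumed, already-logged-in
# discord.Client instance.
import discord

async def show_app_info(client: discord.Client) -> None:
    app_info = await client.application_info()
    print(app_info.name, app_info.id)
    if app_info.install_params is not None:
        # AppInstallParams exposes the OAuth2 scopes and a Permissions object.
        print('scopes:', app_info.install_params.scopes)
        print('permissions value:', app_info.install_params.permissions.value)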
- """ - - __slots__ = ('scopes', 'permissions') - - def __init__(self, data: InstallParamsPayload) -> None: - self.scopes: List[str] = data.get('scopes', []) - self.permissions: Permissions = Permissions(int(data['permissions'])) diff --git a/.venv/Lib/site-packages/discord/asset.py b/.venv/Lib/site-packages/discord/asset.py deleted file mode 100644 index d88ebb9..0000000 --- a/.venv/Lib/site-packages/discord/asset.py +++ /dev/null @@ -1,516 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -import io -import os -from typing import Any, Literal, Optional, TYPE_CHECKING, Tuple, Union -from .errors import DiscordException -from . import utils -from .file import File - -import yarl - -# fmt: off -__all__ = ( - 'Asset', -) -# fmt: on - -if TYPE_CHECKING: - from typing_extensions import Self - - from .state import ConnectionState - from .webhook.async_ import _WebhookState - - _State = Union[ConnectionState, _WebhookState] - - ValidStaticFormatTypes = Literal['webp', 'jpeg', 'jpg', 'png'] - ValidAssetFormatTypes = Literal['webp', 'jpeg', 'jpg', 'png', 'gif'] - -VALID_STATIC_FORMATS = frozenset({"jpeg", "jpg", "webp", "png"}) -VALID_ASSET_FORMATS = VALID_STATIC_FORMATS | {"gif"} - - -MISSING = utils.MISSING - - -class AssetMixin: - __slots__ = () - url: str - _state: Optional[Any] - - async def read(self) -> bytes: - """|coro| - - Retrieves the content of this asset as a :class:`bytes` object. - - Raises - ------ - DiscordException - There was no internal connection state. - HTTPException - Downloading the asset failed. - NotFound - The asset was deleted. - - Returns - ------- - :class:`bytes` - The content of the asset. - """ - if self._state is None: - raise DiscordException('Invalid state (no ConnectionState provided)') - - return await self._state.http.get_from_cdn(self.url) - - async def save(self, fp: Union[str, bytes, os.PathLike[Any], io.BufferedIOBase], *, seek_begin: bool = True) -> int: - """|coro| - - Saves this asset into a file-like object. - - Parameters - ---------- - fp: Union[:class:`io.BufferedIOBase`, :class:`os.PathLike`] - The file-like object to save this asset to or the filename - to use. If a filename is passed then a file is created with that - filename and used instead. - seek_begin: :class:`bool` - Whether to seek to the beginning of the file after saving is - successfully done. - - Raises - ------ - DiscordException - There was no internal connection state. 
- HTTPException - Downloading the asset failed. - NotFound - The asset was deleted. - - Returns - -------- - :class:`int` - The number of bytes written. - """ - - data = await self.read() - if isinstance(fp, io.BufferedIOBase): - written = fp.write(data) - if seek_begin: - fp.seek(0) - return written - else: - with open(fp, 'wb') as f: - return f.write(data) - - async def to_file( - self, - *, - filename: Optional[str] = MISSING, - description: Optional[str] = None, - spoiler: bool = False, - ) -> File: - """|coro| - - Converts the asset into a :class:`File` suitable for sending via - :meth:`abc.Messageable.send`. - - .. versionadded:: 2.0 - - Parameters - ----------- - filename: Optional[:class:`str`] - The filename of the file. If not provided, then the filename from - the asset's URL is used. - description: Optional[:class:`str`] - The description for the file. - spoiler: :class:`bool` - Whether the file is a spoiler. - - Raises - ------ - DiscordException - The asset does not have an associated state. - ValueError - The asset is a unicode emoji. - TypeError - The asset is a sticker with lottie type. - HTTPException - Downloading the asset failed. - NotFound - The asset was deleted. - - Returns - ------- - :class:`File` - The asset as a file suitable for sending. - """ - - data = await self.read() - file_filename = filename if filename is not MISSING else yarl.URL(self.url).name - return File(io.BytesIO(data), filename=file_filename, description=description, spoiler=spoiler) - - -class Asset(AssetMixin): - """Represents a CDN asset on Discord. - - .. container:: operations - - .. describe:: str(x) - - Returns the URL of the CDN asset. - - .. describe:: len(x) - - Returns the length of the CDN asset's URL. - - .. describe:: x == y - - Checks if the asset is equal to another asset. - - .. describe:: x != y - - Checks if the asset is not equal to another asset. - - .. describe:: hash(x) - - Returns the hash of the asset. - """ - - __slots__: Tuple[str, ...] 
= ( - '_state', - '_url', - '_animated', - '_key', - ) - - BASE = 'https://cdn.discordapp.com' - - def __init__(self, state: _State, *, url: str, key: str, animated: bool = False) -> None: - self._state: _State = state - self._url: str = url - self._animated: bool = animated - self._key: str = key - - @classmethod - def _from_default_avatar(cls, state: _State, index: int) -> Self: - return cls( - state, - url=f'{cls.BASE}/embed/avatars/{index}.png', - key=str(index), - animated=False, - ) - - @classmethod - def _from_avatar(cls, state: _State, user_id: int, avatar: str) -> Self: - animated = avatar.startswith('a_') - format = 'gif' if animated else 'png' - return cls( - state, - url=f'{cls.BASE}/avatars/{user_id}/{avatar}.{format}?size=1024', - key=avatar, - animated=animated, - ) - - @classmethod - def _from_guild_avatar(cls, state: _State, guild_id: int, member_id: int, avatar: str) -> Self: - animated = avatar.startswith('a_') - format = 'gif' if animated else 'png' - return cls( - state, - url=f"{cls.BASE}/guilds/{guild_id}/users/{member_id}/avatars/{avatar}.{format}?size=1024", - key=avatar, - animated=animated, - ) - - @classmethod - def _from_icon(cls, state: _State, object_id: int, icon_hash: str, path: str) -> Self: - return cls( - state, - url=f'{cls.BASE}/{path}-icons/{object_id}/{icon_hash}.png?size=1024', - key=icon_hash, - animated=False, - ) - - @classmethod - def _from_app_icon( - cls, state: _State, object_id: int, icon_hash: str, asset_type: Literal['icon', 'cover_image'] - ) -> Self: - return cls( - state, - url=f'{cls.BASE}/app-icons/{object_id}/{asset_type}.png?size=1024', - key=icon_hash, - animated=False, - ) - - @classmethod - def _from_cover_image(cls, state: _State, object_id: int, cover_image_hash: str) -> Self: - return cls( - state, - url=f'{cls.BASE}/app-assets/{object_id}/store/{cover_image_hash}.png?size=1024', - key=cover_image_hash, - animated=False, - ) - - @classmethod - def _from_scheduled_event_cover_image(cls, state: _State, scheduled_event_id: int, cover_image_hash: str) -> Self: - return cls( - state, - url=f'{cls.BASE}/guild-events/{scheduled_event_id}/{cover_image_hash}.png?size=1024', - key=cover_image_hash, - animated=False, - ) - - @classmethod - def _from_guild_image(cls, state: _State, guild_id: int, image: str, path: str) -> Self: - animated = image.startswith('a_') - format = 'gif' if animated else 'png' - return cls( - state, - url=f'{cls.BASE}/{path}/{guild_id}/{image}.{format}?size=1024', - key=image, - animated=animated, - ) - - @classmethod - def _from_guild_icon(cls, state: _State, guild_id: int, icon_hash: str) -> Self: - animated = icon_hash.startswith('a_') - format = 'gif' if animated else 'png' - return cls( - state, - url=f'{cls.BASE}/icons/{guild_id}/{icon_hash}.{format}?size=1024', - key=icon_hash, - animated=animated, - ) - - @classmethod - def _from_sticker_banner(cls, state: _State, banner: int) -> Self: - return cls( - state, - url=f'{cls.BASE}/app-assets/710982414301790216/store/{banner}.png', - key=str(banner), - animated=False, - ) - - @classmethod - def _from_user_banner(cls, state: _State, user_id: int, banner_hash: str) -> Self: - animated = banner_hash.startswith('a_') - format = 'gif' if animated else 'png' - return cls( - state, - url=f'{cls.BASE}/banners/{user_id}/{banner_hash}.{format}?size=512', - key=banner_hash, - animated=animated, - ) - - def __str__(self) -> str: - return self._url - - def __len__(self) -> int: - return len(self._url) - - def __repr__(self) -> str: - shorten = self._url.replace(self.BASE, 
'') - return f'' - - def __eq__(self, other: object) -> bool: - return isinstance(other, Asset) and self._url == other._url - - def __hash__(self) -> int: - return hash(self._url) - - @property - def url(self) -> str: - """:class:`str`: Returns the underlying URL of the asset.""" - return self._url - - @property - def key(self) -> str: - """:class:`str`: Returns the identifying key of the asset.""" - return self._key - - def is_animated(self) -> bool: - """:class:`bool`: Returns whether the asset is animated.""" - return self._animated - - def replace( - self, - *, - size: int = MISSING, - format: ValidAssetFormatTypes = MISSING, - static_format: ValidStaticFormatTypes = MISSING, - ) -> Self: - """Returns a new asset with the passed components replaced. - - - .. versionchanged:: 2.0 - ``static_format`` is now preferred over ``format`` - if both are present and the asset is not animated. - - .. versionchanged:: 2.0 - This function will now raise :exc:`ValueError` instead of - ``InvalidArgument``. - - Parameters - ----------- - size: :class:`int` - The new size of the asset. - format: :class:`str` - The new format to change it to. Must be either - 'webp', 'jpeg', 'jpg', 'png', or 'gif' if it's animated. - static_format: :class:`str` - The new format to change it to if the asset isn't animated. - Must be either 'webp', 'jpeg', 'jpg', or 'png'. - - Raises - ------- - ValueError - An invalid size or format was passed. - - Returns - -------- - :class:`Asset` - The newly updated asset. - """ - url = yarl.URL(self._url) - path, _ = os.path.splitext(url.path) - - if format is not MISSING: - if self._animated: - if format not in VALID_ASSET_FORMATS: - raise ValueError(f'format must be one of {VALID_ASSET_FORMATS}') - else: - if static_format is MISSING and format not in VALID_STATIC_FORMATS: - raise ValueError(f'format must be one of {VALID_STATIC_FORMATS}') - url = url.with_path(f'{path}.{format}') - - if static_format is not MISSING and not self._animated: - if static_format not in VALID_STATIC_FORMATS: - raise ValueError(f'static_format must be one of {VALID_STATIC_FORMATS}') - url = url.with_path(f'{path}.{static_format}') - - if size is not MISSING: - if not utils.valid_icon_size(size): - raise ValueError('size must be a power of 2 between 16 and 4096') - url = url.with_query(size=size) - else: - url = url.with_query(url.raw_query_string) - - url = str(url) - return Asset(state=self._state, url=url, key=self._key, animated=self._animated) - - def with_size(self, size: int, /) -> Self: - """Returns a new asset with the specified size. - - .. versionchanged:: 2.0 - This function will now raise :exc:`ValueError` instead of - ``InvalidArgument``. - - Parameters - ------------ - size: :class:`int` - The new size of the asset. - - Raises - ------- - ValueError - The asset had an invalid size. - - Returns - -------- - :class:`Asset` - The new updated asset. - """ - if not utils.valid_icon_size(size): - raise ValueError('size must be a power of 2 between 16 and 4096') - - url = str(yarl.URL(self._url).with_query(size=size)) - return Asset(state=self._state, url=url, key=self._key, animated=self._animated) - - def with_format(self, format: ValidAssetFormatTypes, /) -> Self: - """Returns a new asset with the specified format. - - .. versionchanged:: 2.0 - This function will now raise :exc:`ValueError` instead of - ``InvalidArgument``. - - Parameters - ------------ - format: :class:`str` - The new format of the asset. - - Raises - ------- - ValueError - The asset had an invalid format. 
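# --- Illustrative usage sketch (not part of the deleted file) ---
# A hedged example of the Asset helpers documented above (with_size,
# with_format, replace, save). `user` is an assumed discord.User or
# discord.Member.
import discord

async def save_avatar(user: discord.User) -> None:
    avatar = user.display_avatar                       # an Asset
    small = avatar.with_size(256).with_format('png')   # chainable; each call returns a new Asset
    print(small.url)

    # replace() validates the size (a power of 2 between 16 and 4096) and the
    # format, raising ValueError otherwise, as described above.
    resized = avatar.replace(size=512, static_format='webp')
    await resized.save('avatar.webp')                  # write the image bytes to disk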
- - Returns - -------- - :class:`Asset` - The new updated asset. - """ - - if self._animated: - if format not in VALID_ASSET_FORMATS: - raise ValueError(f'format must be one of {VALID_ASSET_FORMATS}') - else: - if format not in VALID_STATIC_FORMATS: - raise ValueError(f'format must be one of {VALID_STATIC_FORMATS}') - - url = yarl.URL(self._url) - path, _ = os.path.splitext(url.path) - url = str(url.with_path(f'{path}.{format}').with_query(url.raw_query_string)) - return Asset(state=self._state, url=url, key=self._key, animated=self._animated) - - def with_static_format(self, format: ValidStaticFormatTypes, /) -> Self: - """Returns a new asset with the specified static format. - - This only changes the format if the underlying asset is - not animated. Otherwise, the asset is not changed. - - .. versionchanged:: 2.0 - This function will now raise :exc:`ValueError` instead of - ``InvalidArgument``. - - Parameters - ------------ - format: :class:`str` - The new static format of the asset. - - Raises - ------- - ValueError - The asset had an invalid format. - - Returns - -------- - :class:`Asset` - The new updated asset. - """ - - if self._animated: - return self - return self.with_format(format) diff --git a/.venv/Lib/site-packages/discord/audit_logs.py b/.venv/Lib/site-packages/discord/audit_logs.py deleted file mode 100644 index eebcecf..0000000 --- a/.venv/Lib/site-packages/discord/audit_logs.py +++ /dev/null @@ -1,865 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Mapping, Generator, List, Optional, Tuple, Type, TypeVar, Union - -from . import enums, flags, utils -from .asset import Asset -from .colour import Colour -from .invite import Invite -from .mixins import Hashable -from .object import Object -from .permissions import PermissionOverwrite, Permissions -from .automod import AutoModTrigger, AutoModRuleAction, AutoModPresets, AutoModRule -from .role import Role -from .emoji import Emoji -from .partial_emoji import PartialEmoji -from .member import Member -from .scheduled_event import ScheduledEvent -from .stage_instance import StageInstance -from .sticker import GuildSticker -from .threads import Thread -from .integrations import PartialIntegration -from .channel import ForumChannel, StageChannel, ForumTag - -__all__ = ( - 'AuditLogDiff', - 'AuditLogChanges', - 'AuditLogEntry', -) - - -if TYPE_CHECKING: - import datetime - - from . 
import abc - from .guild import Guild - from .state import ConnectionState - from .types.audit_log import ( - AuditLogChange as AuditLogChangePayload, - AuditLogEntry as AuditLogEntryPayload, - ) - from .types.channel import ( - PermissionOverwrite as PermissionOverwritePayload, - ForumTag as ForumTagPayload, - DefaultReaction as DefaultReactionPayload, - ) - from .types.invite import Invite as InvitePayload - from .types.role import Role as RolePayload - from .types.snowflake import Snowflake - from .types.command import ApplicationCommandPermissions - from .types.automod import AutoModerationTriggerMetadata, AutoModerationAction - from .user import User - from .app_commands import AppCommand - from .webhook import Webhook - - TargetType = Union[ - Guild, - abc.GuildChannel, - Member, - User, - Role, - Invite, - Emoji, - StageInstance, - GuildSticker, - Thread, - Object, - PartialIntegration, - AutoModRule, - ScheduledEvent, - Webhook, - AppCommand, - None, - ] - - -def _transform_timestamp(entry: AuditLogEntry, data: Optional[str]) -> Optional[datetime.datetime]: - return utils.parse_time(data) - - -def _transform_color(entry: AuditLogEntry, data: int) -> Colour: - return Colour(data) - - -def _transform_snowflake(entry: AuditLogEntry, data: Snowflake) -> int: - return int(data) - - -def _transform_channel(entry: AuditLogEntry, data: Optional[Snowflake]) -> Optional[Union[abc.GuildChannel, Object]]: - if data is None: - return None - return entry.guild.get_channel(int(data)) or Object(id=data) - - -def _transform_channels_or_threads( - entry: AuditLogEntry, data: List[Snowflake] -) -> List[Union[abc.GuildChannel, Thread, Object]]: - return [entry.guild.get_channel_or_thread(int(data)) or Object(id=data) for data in data] - - -def _transform_member_id(entry: AuditLogEntry, data: Optional[Snowflake]) -> Union[Member, User, None]: - if data is None: - return None - return entry._get_member(int(data)) - - -def _transform_guild_id(entry: AuditLogEntry, data: Optional[Snowflake]) -> Optional[Guild]: - if data is None: - return None - return entry._state._get_guild(int(data)) - - -def _transform_roles(entry: AuditLogEntry, data: List[Snowflake]) -> List[Union[Role, Object]]: - return [entry.guild.get_role(int(role_id)) or Object(role_id, type=Role) for role_id in data] - - -def _transform_applied_forum_tags(entry: AuditLogEntry, data: List[Snowflake]) -> List[Union[ForumTag, Object]]: - thread = entry.target - if isinstance(thread, Thread) and isinstance(thread.parent, ForumChannel): - return [thread.parent.get_tag(tag_id) or Object(id=tag_id, type=ForumTag) for tag_id in map(int, data)] - return [Object(id=tag_id, type=ForumTag) for tag_id in data] - - -def _transform_overloaded_flags(entry: AuditLogEntry, data: int) -> Union[int, flags.ChannelFlags]: - # The `flags` key is definitely overloaded. Right now it's for channels and threads but - # I am aware of `member.flags` and `user.flags` existing. However, this does not impact audit logs - # at the moment but better safe than sorry. 
- channel_audit_log_types = ( - enums.AuditLogAction.channel_create, - enums.AuditLogAction.channel_update, - enums.AuditLogAction.channel_delete, - enums.AuditLogAction.thread_create, - enums.AuditLogAction.thread_update, - enums.AuditLogAction.thread_delete, - ) - - if entry.action in channel_audit_log_types: - return flags.ChannelFlags._from_value(data) - return data - - -def _transform_forum_tags(entry: AuditLogEntry, data: List[ForumTagPayload]) -> List[ForumTag]: - return [ForumTag.from_data(state=entry._state, data=d) for d in data] - - -def _transform_default_reaction(entry: AuditLogEntry, data: DefaultReactionPayload) -> Optional[PartialEmoji]: - if data is None: - return None - - emoji_name = data.get('emoji_name') or '' - emoji_id = utils._get_as_snowflake(data, 'emoji_id') or None # Coerce 0 -> None - return PartialEmoji.with_state(state=entry._state, name=emoji_name, id=emoji_id) - - -def _transform_overwrites( - entry: AuditLogEntry, data: List[PermissionOverwritePayload] -) -> List[Tuple[Object, PermissionOverwrite]]: - overwrites = [] - for elem in data: - allow = Permissions(int(elem['allow'])) - deny = Permissions(int(elem['deny'])) - ow = PermissionOverwrite.from_pair(allow, deny) - - ow_type = elem['type'] - ow_id = int(elem['id']) - target = None - if ow_type == '0': - target = entry.guild.get_role(ow_id) - elif ow_type == '1': - target = entry._get_member(ow_id) - - if target is None: - target = Object(id=ow_id, type=Role if ow_type == '0' else Member) - - overwrites.append((target, ow)) - - return overwrites - - -def _transform_icon(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]: - if data is None: - return None - if entry.action is enums.AuditLogAction.guild_update: - return Asset._from_guild_icon(entry._state, entry.guild.id, data) - else: - return Asset._from_icon(entry._state, entry._target_id, data, path='role') # type: ignore # target_id won't be None in this case - - -def _transform_avatar(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]: - if data is None: - return None - return Asset._from_avatar(entry._state, entry._target_id, data) # type: ignore # target_id won't be None in this case - - -def _transform_cover_image(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]: - if data is None: - return None - return Asset._from_scheduled_event_cover_image(entry._state, entry._target_id, data) # type: ignore # target_id won't be None in this case - - -def _guild_hash_transformer(path: str) -> Callable[[AuditLogEntry, Optional[str]], Optional[Asset]]: - def _transform(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]: - if data is None: - return None - return Asset._from_guild_image(entry._state, entry.guild.id, data, path=path) - - return _transform - - -def _transform_automod_trigger_metadata( - entry: AuditLogEntry, data: AutoModerationTriggerMetadata -) -> Optional[AutoModTrigger]: - - if isinstance(entry.target, AutoModRule): - # Trigger type cannot be changed, so type should be the same before and after updates. - # Avoids checking which keys are in data to guess trigger type - # or returning None if data is empty. 
- try: - return AutoModTrigger.from_data(type=entry.target.trigger.type.value, data=data) - except Exception: - pass - - # Try to infer trigger type from available keys in data - if 'presets' in data: - return AutoModTrigger( - type=enums.AutoModRuleTriggerType.keyword_preset, - presets=AutoModPresets._from_value(data['presets']), # type: ignore - allow_list=data.get('allow_list'), - ) - elif 'keyword_filter' in data: - return AutoModTrigger( - type=enums.AutoModRuleTriggerType.keyword, - keyword_filter=data['keyword_filter'], # type: ignore - allow_list=data.get('allow_list'), - regex_patterns=data.get('regex_patterns'), - ) - elif 'mention_total_limit' in data: - return AutoModTrigger(type=enums.AutoModRuleTriggerType.mention_spam, mention_limit=data['mention_total_limit']) # type: ignore - else: - return AutoModTrigger(type=enums.AutoModRuleTriggerType.spam) - - -def _transform_automod_actions(entry: AuditLogEntry, data: List[AutoModerationAction]) -> List[AutoModRuleAction]: - return [AutoModRuleAction.from_data(action) for action in data] - - -E = TypeVar('E', bound=enums.Enum) - - -def _enum_transformer(enum: Type[E]) -> Callable[[AuditLogEntry, int], E]: - def _transform(entry: AuditLogEntry, data: int) -> E: - return enums.try_enum(enum, data) - - return _transform - - -F = TypeVar('F', bound=flags.BaseFlags) - - -def _flag_transformer(cls: Type[F]) -> Callable[[AuditLogEntry, Union[int, str]], F]: - def _transform(entry: AuditLogEntry, data: Union[int, str]) -> F: - return cls._from_value(int(data)) - - return _transform - - -def _transform_type( - entry: AuditLogEntry, data: Union[int, str] -) -> Union[enums.ChannelType, enums.StickerType, enums.WebhookType, str]: - if entry.action.name.startswith('sticker_'): - return enums.try_enum(enums.StickerType, data) - elif entry.action.name.startswith('integration_'): - return data # type: ignore # integration type is str - elif entry.action.name.startswith('webhook_'): - return enums.try_enum(enums.WebhookType, data) - else: - return enums.try_enum(enums.ChannelType, data) - - -class AuditLogDiff: - def __len__(self) -> int: - return len(self.__dict__) - - def __iter__(self) -> Generator[Tuple[str, Any], None, None]: - yield from self.__dict__.items() - - def __repr__(self) -> str: - values = ' '.join('%s=%r' % item for item in self.__dict__.items()) - return f'<AuditLogDiff {values}>' - - if TYPE_CHECKING: - - def __getattr__(self, item: str) -> Any: - ... - - def __setattr__(self, key: str, value: Any) -> Any: - ...
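A minimal consumption sketch for the diff container defined above, assuming a connected client whose user has the View Audit Log permission; Guild.audit_logs, AuditLogEntry.before/after and AuditLogDiff iteration are the pieces documented in this file, while the helper name and the limit/action filter chosen here are illustrative only.

import discord

async def show_recent_channel_edits(guild: discord.Guild) -> None:
    # entry.before / entry.after are AuditLogDiff instances; iterating a diff
    # yields (attribute, value) pairs for every change recorded on the entry.
    async for entry in guild.audit_logs(limit=5, action=discord.AuditLogAction.channel_update):
        print(f'{entry.user} edited {entry.target}:')
        for attr, old in entry.before:
            print(f'  {attr}: {old!r} -> {getattr(entry.after, attr, None)!r}')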
- - -Transformer = Callable[["AuditLogEntry", Any], Any] - - -class AuditLogChanges: - # fmt: off - TRANSFORMERS: ClassVar[Mapping[str, Tuple[Optional[str], Optional[Transformer]]]] = { - 'verification_level': (None, _enum_transformer(enums.VerificationLevel)), - 'explicit_content_filter': (None, _enum_transformer(enums.ContentFilter)), - 'allow': (None, _flag_transformer(Permissions)), - 'deny': (None, _flag_transformer(Permissions)), - 'permissions': (None, _flag_transformer(Permissions)), - 'id': (None, _transform_snowflake), - 'color': ('colour', _transform_color), - 'owner_id': ('owner', _transform_member_id), - 'inviter_id': ('inviter', _transform_member_id), - 'channel_id': ('channel', _transform_channel), - 'afk_channel_id': ('afk_channel', _transform_channel), - 'system_channel_id': ('system_channel', _transform_channel), - 'system_channel_flags': (None, _flag_transformer(flags.SystemChannelFlags)), - 'widget_channel_id': ('widget_channel', _transform_channel), - 'rules_channel_id': ('rules_channel', _transform_channel), - 'public_updates_channel_id': ('public_updates_channel', _transform_channel), - 'permission_overwrites': ('overwrites', _transform_overwrites), - 'splash_hash': ('splash', _guild_hash_transformer('splashes')), - 'banner_hash': ('banner', _guild_hash_transformer('banners')), - 'discovery_splash_hash': ('discovery_splash', _guild_hash_transformer('discovery-splashes')), - 'icon_hash': ('icon', _transform_icon), - 'avatar_hash': ('avatar', _transform_avatar), - 'rate_limit_per_user': ('slowmode_delay', None), - 'default_thread_rate_limit_per_user': ('default_thread_slowmode_delay', None), - 'guild_id': ('guild', _transform_guild_id), - 'tags': ('emoji', None), - 'default_message_notifications': ('default_notifications', _enum_transformer(enums.NotificationLevel)), - 'video_quality_mode': (None, _enum_transformer(enums.VideoQualityMode)), - 'privacy_level': (None, _enum_transformer(enums.PrivacyLevel)), - 'format_type': (None, _enum_transformer(enums.StickerFormatType)), - 'type': (None, _transform_type), - 'communication_disabled_until': ('timed_out_until', _transform_timestamp), - 'expire_behavior': (None, _enum_transformer(enums.ExpireBehaviour)), - 'mfa_level': (None, _enum_transformer(enums.MFALevel)), - 'status': (None, _enum_transformer(enums.EventStatus)), - 'entity_type': (None, _enum_transformer(enums.EntityType)), - 'preferred_locale': (None, _enum_transformer(enums.Locale)), - 'image_hash': ('cover_image', _transform_cover_image), - 'trigger_type': (None, _enum_transformer(enums.AutoModRuleTriggerType)), - 'event_type': (None, _enum_transformer(enums.AutoModRuleEventType)), - 'trigger_metadata': ('trigger', _transform_automod_trigger_metadata), - 'actions': (None, _transform_automod_actions), - 'exempt_channels': (None, _transform_channels_or_threads), - 'exempt_roles': (None, _transform_roles), - 'applied_tags': (None, _transform_applied_forum_tags), - 'available_tags': (None, _transform_forum_tags), - 'flags': (None, _transform_overloaded_flags), - 'default_reaction_emoji': (None, _transform_default_reaction), - } - # fmt: on - - def __init__(self, entry: AuditLogEntry, data: List[AuditLogChangePayload]): - self.before: AuditLogDiff = AuditLogDiff() - self.after: AuditLogDiff = AuditLogDiff() - # special case entire process since each - # element in data is a different target - # key is the target id - if entry.action is enums.AuditLogAction.app_command_permission_update: - self.before.app_command_permissions = [] - self.after.app_command_permissions = 
[] - - for elem in data: - self._handle_app_command_permissions( - self.before, - entry, - elem.get('old_value'), # type: ignore # value will be an ApplicationCommandPermissions if present - ) - - self._handle_app_command_permissions( - self.after, - entry, - elem.get('new_value'), # type: ignore # value will be an ApplicationCommandPermissions if present - ) - return - - for elem in data: - attr = elem['key'] - - # special cases for role add/remove - if attr == '$add': - self._handle_role(self.before, self.after, entry, elem['new_value']) # type: ignore # new_value is a list of roles in this case - continue - elif attr == '$remove': - self._handle_role(self.after, self.before, entry, elem['new_value']) # type: ignore # new_value is a list of roles in this case - continue - - try: - key, transformer = self.TRANSFORMERS[attr] - except (ValueError, KeyError): - transformer = None - else: - if key: - attr = key - - transformer: Optional[Transformer] - - try: - before = elem['old_value'] - except KeyError: - before = None - else: - if transformer: - before = transformer(entry, before) - - setattr(self.before, attr, before) - - try: - after = elem['new_value'] - except KeyError: - after = None - else: - if transformer: - after = transformer(entry, after) - - setattr(self.after, attr, after) - - # add an alias - if hasattr(self.after, 'colour'): - self.after.color = self.after.colour - self.before.color = self.before.colour - if hasattr(self.after, 'expire_behavior'): - self.after.expire_behaviour = self.after.expire_behavior - self.before.expire_behaviour = self.before.expire_behavior - - def __repr__(self) -> str: - return f'' - - def _handle_role(self, first: AuditLogDiff, second: AuditLogDiff, entry: AuditLogEntry, elem: List[RolePayload]) -> None: - if not hasattr(first, 'roles'): - setattr(first, 'roles', []) - - data = [] - g: Guild = entry.guild - - for e in elem: - role_id = int(e['id']) - role = g.get_role(role_id) - - if role is None: - role = Object(id=role_id, type=Role) - role.name = e['name'] # type: ignore # Object doesn't usually have name - - data.append(role) - - setattr(second, 'roles', data) - - def _handle_app_command_permissions( - self, - diff: AuditLogDiff, - entry: AuditLogEntry, - data: Optional[ApplicationCommandPermissions], - ): - if data is None: - return - - # avoid circular import - from discord.app_commands import AppCommandPermissions - - state = entry._state - guild = entry.guild - diff.app_command_permissions.append(AppCommandPermissions(data=data, guild=guild, state=state)) - - -class _AuditLogProxy: - def __init__(self, **kwargs: Any) -> None: - for k, v in kwargs.items(): - setattr(self, k, v) - - -class _AuditLogProxyMemberPrune(_AuditLogProxy): - delete_member_days: int - members_removed: int - - -class _AuditLogProxyMemberMoveOrMessageDelete(_AuditLogProxy): - channel: Union[abc.GuildChannel, Thread] - count: int - - -class _AuditLogProxyMemberDisconnect(_AuditLogProxy): - count: int - - -class _AuditLogProxyPinAction(_AuditLogProxy): - channel: Union[abc.GuildChannel, Thread] - message_id: int - - -class _AuditLogProxyStageInstanceAction(_AuditLogProxy): - channel: abc.GuildChannel - - -class _AuditLogProxyMessageBulkDelete(_AuditLogProxy): - count: int - - -class _AuditLogProxyAutoModAction(_AuditLogProxy): - automod_rule_name: str - automod_rule_trigger_type: str - channel: Optional[Union[abc.GuildChannel, Thread]] - - -class AuditLogEntry(Hashable): - r"""Represents an Audit Log entry. - - You retrieve these via :meth:`Guild.audit_logs`. - - .. 
container:: operations - - .. describe:: x == y - - Checks if two entries are equal. - - .. describe:: x != y - - Checks if two entries are not equal. - - .. describe:: hash(x) - - Returns the entry's hash. - - .. versionchanged:: 1.7 - Audit log entries are now comparable and hashable. - - Attributes - ----------- - action: :class:`AuditLogAction` - The action that was done. - user: Optional[:class:`abc.User`] - The user who initiated this action. Usually a :class:`Member`\, unless gone - then it's a :class:`User`. - user_id: Optional[:class:`int`] - The user ID who initiated this action. - - .. versionadded:: 2.2 - id: :class:`int` - The entry ID. - guild: :class:`Guild` - The guild that this entry belongs to. - target: Any - The target that got changed. The exact type of this depends on - the action being done. - reason: Optional[:class:`str`] - The reason this action was done. - extra: Any - Extra information that this entry has that might be useful. - For most actions, this is ``None``. However in some cases it - contains extra information. See :class:`AuditLogAction` for - which actions have this field filled out. - """ - - def __init__( - self, - *, - users: Mapping[int, User], - integrations: Mapping[int, PartialIntegration], - app_commands: Mapping[int, AppCommand], - automod_rules: Mapping[int, AutoModRule], - webhooks: Mapping[int, Webhook], - data: AuditLogEntryPayload, - guild: Guild, - ): - self._state: ConnectionState = guild._state - self.guild: Guild = guild - self._users: Mapping[int, User] = users - self._integrations: Mapping[int, PartialIntegration] = integrations - self._app_commands: Mapping[int, AppCommand] = app_commands - self._automod_rules: Mapping[int, AutoModRule] = automod_rules - self._webhooks: Mapping[int, Webhook] = webhooks - self._from_data(data) - - def _from_data(self, data: AuditLogEntryPayload) -> None: - self.action: enums.AuditLogAction = enums.try_enum(enums.AuditLogAction, data['action_type']) - self.id: int = int(data['id']) - - # this key is technically not usually present - self.reason: Optional[str] = data.get('reason') - extra = data.get('options') - - # fmt: off - self.extra: Union[ - _AuditLogProxyMemberPrune, - _AuditLogProxyMemberMoveOrMessageDelete, - _AuditLogProxyMemberDisconnect, - _AuditLogProxyPinAction, - _AuditLogProxyStageInstanceAction, - _AuditLogProxyMessageBulkDelete, - _AuditLogProxyAutoModAction, - Member, User, None, PartialIntegration, - Role, Object - ] = None - # fmt: on - - if isinstance(self.action, enums.AuditLogAction) and extra: - if self.action is enums.AuditLogAction.member_prune: - # member prune has two keys with useful information - self.extra = _AuditLogProxyMemberPrune( - delete_member_days=int(extra['delete_member_days']), - members_removed=int(extra['members_removed']), - ) - elif self.action is enums.AuditLogAction.member_move or self.action is enums.AuditLogAction.message_delete: - channel_id = int(extra['channel_id']) - self.extra = _AuditLogProxyMemberMoveOrMessageDelete( - count=int(extra['count']), - channel=self.guild.get_channel_or_thread(channel_id) or Object(id=channel_id), - ) - elif self.action is enums.AuditLogAction.member_disconnect: - # The member disconnect action has a dict with some information - self.extra = _AuditLogProxyMemberDisconnect(count=int(extra['count'])) - elif self.action is enums.AuditLogAction.message_bulk_delete: - # The bulk message delete action has the number of messages deleted - self.extra = _AuditLogProxyMessageBulkDelete(count=int(extra['count'])) - elif 
self.action.name.endswith('pin'): - # the pin actions have a dict with some information - channel_id = int(extra['channel_id']) - self.extra = _AuditLogProxyPinAction( - channel=self.guild.get_channel_or_thread(channel_id) or Object(id=channel_id), - message_id=int(extra['message_id']), - ) - elif ( - self.action is enums.AuditLogAction.automod_block_message - or self.action is enums.AuditLogAction.automod_flag_message - or self.action is enums.AuditLogAction.automod_timeout_member - ): - channel_id = utils._get_as_snowflake(extra, 'channel_id') - channel = None - - # May be an empty string instead of None due to a Discord issue - if channel_id: - channel = self.guild.get_channel_or_thread(channel_id) or Object(id=channel_id) - - self.extra = _AuditLogProxyAutoModAction( - automod_rule_name=extra['auto_moderation_rule_name'], - automod_rule_trigger_type=enums.try_enum( - enums.AutoModRuleTriggerType, extra['auto_moderation_rule_trigger_type'] - ), - channel=channel, - ) - - elif self.action.name.startswith('overwrite_'): - # the overwrite_ actions have a dict with some information - instance_id = int(extra['id']) - the_type = extra.get('type') - if the_type == '1': - self.extra = self._get_member(instance_id) - elif the_type == '0': - role = self.guild.get_role(instance_id) - if role is None: - role = Object(id=instance_id, type=Role) - role.name = extra.get('role_name') # type: ignore # Object doesn't usually have name - self.extra = role - elif self.action.name.startswith('stage_instance'): - channel_id = int(extra['channel_id']) - self.extra = _AuditLogProxyStageInstanceAction( - channel=self.guild.get_channel(channel_id) or Object(id=channel_id, type=StageChannel) - ) - elif self.action.name.startswith('app_command'): - app_id = int(extra['application_id']) - self.extra = self._get_integration_by_app_id(app_id) or Object(app_id, type=PartialIntegration) - - # this key is not present when the above is present, typically. 
- # It's a list of { new_value: a, old_value: b, key: c } - # where new_value and old_value are not guaranteed to be there depending - # on the action type, so let's just fetch it for now and only turn it - # into meaningful data when requested - self._changes = data.get('changes', []) - - self.user_id: Optional[int] = utils._get_as_snowflake(data, 'user_id') - self.user: Optional[Union[User, Member]] = self._get_member(self.user_id) - self._target_id = utils._get_as_snowflake(data, 'target_id') - - def _get_member(self, user_id: Optional[int]) -> Union[Member, User, None]: - if user_id is None: - return None - - return self.guild.get_member(user_id) or self._users.get(user_id) - - def _get_integration(self, integration_id: Optional[int]) -> Optional[PartialIntegration]: - if integration_id is None: - return None - - return self._integrations.get(integration_id) - - def _get_integration_by_app_id(self, application_id: Optional[int]) -> Optional[PartialIntegration]: - if application_id is None: - return None - - # get PartialIntegration by application id - return utils.get(self._integrations.values(), application_id=application_id) - - def _get_app_command(self, app_command_id: Optional[int]) -> Optional[AppCommand]: - if app_command_id is None: - return None - - return self._app_commands.get(app_command_id) - - def __repr__(self) -> str: - return f'' - - @utils.cached_property - def created_at(self) -> datetime.datetime: - """:class:`datetime.datetime`: Returns the entry's creation time in UTC.""" - return utils.snowflake_time(self.id) - - @utils.cached_property - def target(self) -> TargetType: - if self.action.target_type is None: - return None - - try: - converter = getattr(self, '_convert_target_' + self.action.target_type) - except AttributeError: - if self._target_id is None: - return None - return Object(id=self._target_id) - else: - return converter(self._target_id) - - @utils.cached_property - def category(self) -> Optional[enums.AuditLogActionCategory]: - """Optional[:class:`AuditLogActionCategory`]: The category of the action, if applicable.""" - return self.action.category - - @utils.cached_property - def changes(self) -> AuditLogChanges: - """:class:`AuditLogChanges`: The list of changes this entry has.""" - obj = AuditLogChanges(self, self._changes) - del self._changes - return obj - - @utils.cached_property - def before(self) -> AuditLogDiff: - """:class:`AuditLogDiff`: The target's prior state.""" - return self.changes.before - - @utils.cached_property - def after(self) -> AuditLogDiff: - """:class:`AuditLogDiff`: The target's subsequent state.""" - return self.changes.after - - def _convert_target_guild(self, target_id: int) -> Guild: - return self.guild - - def _convert_target_channel(self, target_id: int) -> Union[abc.GuildChannel, Object]: - return self.guild.get_channel(target_id) or Object(id=target_id) - - def _convert_target_user(self, target_id: Optional[int]) -> Optional[Union[Member, User, Object]]: - # For some reason the member_disconnect and member_move action types - # do not have a non-null target_id so safeguard against that - if target_id is None: - return None - - return self._get_member(target_id) or Object(id=target_id, type=Member) - - def _convert_target_role(self, target_id: int) -> Union[Role, Object]: - return self.guild.get_role(target_id) or Object(id=target_id, type=Role) - - def _convert_target_invite(self, target_id: None) -> Invite: - # invites have target_id set to null - # so figure out which change has the full invite data - changeset = 
self.before if self.action is enums.AuditLogAction.invite_delete else self.after - - fake_payload: InvitePayload = { - 'max_age': changeset.max_age, - 'max_uses': changeset.max_uses, - 'code': changeset.code, - 'temporary': changeset.temporary, - 'uses': changeset.uses, - 'channel': None, # type: ignore # the channel is passed to the Invite constructor directly - } - - obj = Invite(state=self._state, data=fake_payload, guild=self.guild, channel=changeset.channel) - try: - obj.inviter = changeset.inviter - except AttributeError: - pass - return obj - - def _convert_target_emoji(self, target_id: int) -> Union[Emoji, Object]: - return self._state.get_emoji(target_id) or Object(id=target_id, type=Emoji) - - def _convert_target_message(self, target_id: int) -> Union[Member, User, Object]: - return self._get_member(target_id) or Object(id=target_id, type=Member) - - def _convert_target_stage_instance(self, target_id: int) -> Union[StageInstance, Object]: - return self.guild.get_stage_instance(target_id) or Object(id=target_id, type=StageInstance) - - def _convert_target_sticker(self, target_id: int) -> Union[GuildSticker, Object]: - return self._state.get_sticker(target_id) or Object(id=target_id, type=GuildSticker) - - def _convert_target_thread(self, target_id: int) -> Union[Thread, Object]: - return self.guild.get_thread(target_id) or Object(id=target_id, type=Thread) - - def _convert_target_guild_scheduled_event(self, target_id: int) -> Union[ScheduledEvent, Object]: - return self.guild.get_scheduled_event(target_id) or Object(id=target_id, type=ScheduledEvent) - - def _convert_target_integration(self, target_id: int) -> Union[PartialIntegration, Object]: - return self._get_integration(target_id) or Object(target_id, type=PartialIntegration) - - def _convert_target_app_command(self, target_id: int) -> Union[AppCommand, Object]: - target = self._get_app_command(target_id) - if not target: - # circular import - from .app_commands import AppCommand - - target = Object(target_id, type=AppCommand) - - return target - - def _convert_target_integration_or_app_command(self, target_id: int) -> Union[PartialIntegration, AppCommand, Object]: - target = self._get_integration_by_app_id(target_id) or self._get_app_command(target_id) - if not target: - try: - # circular import - from .app_commands import AppCommand - - # get application id from extras - # if it matches target id, type should be integration - target_app = self.extra - # extra should be an Object or PartialIntegration - app_id = target_app.application_id if isinstance(target_app, PartialIntegration) else target_app.id # type: ignore - type = PartialIntegration if target_id == app_id else AppCommand - except AttributeError: - return Object(target_id) - else: - return Object(target_id, type=type) - - return target - - def _convert_target_auto_moderation(self, target_id: int) -> Union[AutoModRule, Object]: - return self._automod_rules.get(target_id) or Object(target_id, type=AutoModRule) - - def _convert_target_webhook(self, target_id: int) -> Union[Webhook, Object]: - # circular import - from .webhook import Webhook - - return self._webhooks.get(target_id) or Object(target_id, type=Webhook) diff --git a/.venv/Lib/site-packages/discord/automod.py b/.venv/Lib/site-packages/discord/automod.py deleted file mode 100644 index 84a00c8..0000000 --- a/.venv/Lib/site-packages/discord/automod.py +++ /dev/null @@ -1,600 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person 
obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -import datetime - -from typing import TYPE_CHECKING, Any, Dict, Optional, List, Set, Union, Sequence, overload - -from .enums import AutoModRuleTriggerType, AutoModRuleActionType, AutoModRuleEventType, try_enum -from .flags import AutoModPresets -from . import utils -from .utils import MISSING, cached_slot_property - -if TYPE_CHECKING: - from typing_extensions import Self - from .abc import Snowflake, GuildChannel - from .threads import Thread - from .guild import Guild - from .member import Member - from .state import ConnectionState - from .types.automod import ( - AutoModerationRule as AutoModerationRulePayload, - AutoModerationTriggerMetadata as AutoModerationTriggerMetadataPayload, - AutoModerationAction as AutoModerationActionPayload, - AutoModerationActionExecution as AutoModerationActionExecutionPayload, - ) - from .role import Role - -__all__ = ( - 'AutoModRuleAction', - 'AutoModTrigger', - 'AutoModRule', - 'AutoModAction', -) - - -class AutoModRuleAction: - """Represents an auto moderation's rule action. - - .. note:: - Only one of ``channel_id``, ``duration``, or ``custom_message`` can be used. - - .. versionadded:: 2.0 - - Attributes - ----------- - type: :class:`AutoModRuleActionType` - The type of action to take. - Defaults to :attr:`~AutoModRuleActionType.block_message`. - channel_id: Optional[:class:`int`] - The ID of the channel or thread to send the alert message to, if any. - Passing this sets :attr:`type` to :attr:`~AutoModRuleActionType.send_alert_message`. - duration: Optional[:class:`datetime.timedelta`] - The duration of the timeout to apply, if any. - Has a maximum of 28 days. - Passing this sets :attr:`type` to :attr:`~AutoModRuleActionType.timeout`. - custom_message: Optional[:class:`str`] - A custom message which will be shown to a user when their message is blocked. - Passing this sets :attr:`type` to :attr:`~AutoModRuleActionType.block_message`. - - .. versionadded:: 2.2 - """ - - __slots__ = ('type', 'channel_id', 'duration', 'custom_message') - - @overload - def __init__(self, *, channel_id: Optional[int] = ...) -> None: - ... - - @overload - def __init__(self, *, duration: Optional[datetime.timedelta] = ...) -> None: - ... - - @overload - def __init__(self, *, custom_message: Optional[str] = ...) -> None: - ... 
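A brief construction sketch for the class documented above; the action type is inferred from whichever keyword argument is supplied, and the channel ID and duration below are made up purely for illustration.

import datetime
import discord

block = discord.AutoModRuleAction(custom_message='No invite links, please.')  # block_message
alert = discord.AutoModRuleAction(channel_id=123456789012345678)              # send_alert_message
mute  = discord.AutoModRuleAction(duration=datetime.timedelta(minutes=10))    # timeout

Actions built this way are typically passed as the actions list when creating or editing a rule (see AutoModRule.edit further below).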
- - def __init__( - self, - *, - channel_id: Optional[int] = None, - duration: Optional[datetime.timedelta] = None, - custom_message: Optional[str] = None, - ) -> None: - self.channel_id: Optional[int] = channel_id - self.duration: Optional[datetime.timedelta] = duration - self.custom_message: Optional[str] = custom_message - - if sum(v is None for v in (channel_id, duration, custom_message)) < 2: - raise ValueError('Only one of channel_id, duration, or custom_message can be passed.') - - self.type: AutoModRuleActionType = AutoModRuleActionType.block_message - if channel_id: - self.type = AutoModRuleActionType.send_alert_message - elif duration: - self.type = AutoModRuleActionType.timeout - - def __repr__(self) -> str: - return f'' - - @classmethod - def from_data(cls, data: AutoModerationActionPayload) -> Self: - if data['type'] == AutoModRuleActionType.timeout.value: - duration_seconds = data['metadata']['duration_seconds'] - return cls(duration=datetime.timedelta(seconds=duration_seconds)) - elif data['type'] == AutoModRuleActionType.send_alert_message.value: - channel_id = int(data['metadata']['channel_id']) - return cls(channel_id=channel_id) - return cls(custom_message=data.get('metadata', {}).get('custom_message')) - - def to_dict(self) -> Dict[str, Any]: - ret = {'type': self.type.value, 'metadata': {}} - if self.type is AutoModRuleActionType.block_message and self.custom_message is not None: - ret['metadata'] = {'custom_message': self.custom_message} - elif self.type is AutoModRuleActionType.timeout: - ret['metadata'] = {'duration_seconds': int(self.duration.total_seconds())} # type: ignore # duration cannot be None here - elif self.type is AutoModRuleActionType.send_alert_message: - ret['metadata'] = {'channel_id': str(self.channel_id)} - return ret - - -class AutoModTrigger: - r"""Represents a trigger for an auto moderation rule. - - The following table illustrates relevant attributes for each :class:`AutoModRuleTriggerType`: - - +-----------------------------------------------+------------------------------------------------+ - | Type | Attributes | - +===============================================+================================================+ - | :attr:`AutoModRuleTriggerType.keyword` | :attr:`keyword_filter`, :attr:`regex_patterns`,| - | | :attr:`allow_list` | - +-----------------------------------------------+------------------------------------------------+ - | :attr:`AutoModRuleTriggerType.spam` | | - +-----------------------------------------------+------------------------------------------------+ - | :attr:`AutoModRuleTriggerType.keyword_preset` | :attr:`presets`\, :attr:`allow_list` | - +-----------------------------------------------+------------------------------------------------+ - | :attr:`AutoModRuleTriggerType.mention_spam` | :attr:`mention_limit` | - +-----------------------------------------------+------------------------------------------------+ - - .. versionadded:: 2.0 - - Attributes - ----------- - type: :class:`AutoModRuleTriggerType` - The type of trigger. - keyword_filter: List[:class:`str`] - The list of strings that will trigger the keyword filter. Maximum of 1000. - Keywords can only be up to 60 characters in length. - - This could be combined with :attr:`regex_patterns`. - regex_patterns: List[:class:`str`] - The regex pattern that will trigger the filter. The syntax is based off of - `Rust's regex syntax `_. - Maximum of 10. Regex strings can only be up to 260 characters in length. 
- - This could be combined with :attr:`keyword_filter` and/or :attr:`allow_list` - - .. versionadded:: 2.1 - presets: :class:`AutoModPresets` - The presets used with the preset keyword filter. - allow_list: List[:class:`str`] - The list of words that are exempt from the commonly flagged words. Maximum of 100. - Keywords can only be up to 60 characters in length. - mention_limit: :class:`int` - The total number of user and role mentions a message can contain. - Has a maximum of 50. - """ - - __slots__ = ( - 'type', - 'keyword_filter', - 'presets', - 'allow_list', - 'mention_limit', - 'regex_patterns', - ) - - def __init__( - self, - *, - type: Optional[AutoModRuleTriggerType] = None, - keyword_filter: Optional[List[str]] = None, - presets: Optional[AutoModPresets] = None, - allow_list: Optional[List[str]] = None, - mention_limit: Optional[int] = None, - regex_patterns: Optional[List[str]] = None, - ) -> None: - if type is None and sum(arg is not None for arg in (keyword_filter or regex_patterns, presets, mention_limit)) > 1: - raise ValueError('Please pass only one of keyword_filter, regex_patterns, presets, or mention_limit.') - - if type is not None: - self.type = type - elif keyword_filter is not None or regex_patterns is not None: - self.type = AutoModRuleTriggerType.keyword - elif presets is not None: - self.type = AutoModRuleTriggerType.keyword_preset - elif mention_limit is not None: - self.type = AutoModRuleTriggerType.mention_spam - else: - raise ValueError( - 'Please pass the trigger type explicitly if not using keyword_filter, presets, or mention_limit.' - ) - - self.keyword_filter: List[str] = keyword_filter if keyword_filter is not None else [] - self.presets: AutoModPresets = presets if presets is not None else AutoModPresets() - self.allow_list: List[str] = allow_list if allow_list is not None else [] - self.mention_limit: int = mention_limit if mention_limit is not None else 0 - self.regex_patterns: List[str] = regex_patterns if regex_patterns is not None else [] - - def __repr__(self) -> str: - data = self.to_metadata_dict() - if data: - joined = ' '.join(f'{k}={v!r}' for k, v in data.items()) - return f'' - - return f'' - - @classmethod - def from_data(cls, type: int, data: Optional[AutoModerationTriggerMetadataPayload]) -> Self: - type_ = try_enum(AutoModRuleTriggerType, type) - if data is None: - return cls(type=type_) - elif type_ is AutoModRuleTriggerType.keyword: - return cls( - type=type_, - keyword_filter=data.get('keyword_filter'), - regex_patterns=data.get('regex_patterns'), - allow_list=data.get('allow_list'), - ) - elif type_ is AutoModRuleTriggerType.keyword_preset: - return cls( - type=type_, presets=AutoModPresets._from_value(data.get('presets', [])), allow_list=data.get('allow_list') - ) - elif type_ is AutoModRuleTriggerType.mention_spam: - return cls(type=type_, mention_limit=data.get('mention_total_limit')) - else: - return cls(type=type_) - - def to_metadata_dict(self) -> Optional[Dict[str, Any]]: - if self.type is AutoModRuleTriggerType.keyword: - return { - 'keyword_filter': self.keyword_filter, - 'regex_patterns': self.regex_patterns, - 'allow_list': self.allow_list, - } - elif self.type is AutoModRuleTriggerType.keyword_preset: - return {'presets': self.presets.to_array(), 'allow_list': self.allow_list} - elif self.type is AutoModRuleTriggerType.mention_spam: - return {'mention_total_limit': self.mention_limit} - - -class AutoModRule: - """Represents an auto moderation rule. - - .. 
versionadded:: 2.0 - - Attributes - ----------- - id: :class:`int` - The ID of the rule. - guild: :class:`Guild` - The guild the rule is for. - name: :class:`str` - The name of the rule. - creator_id: :class:`int` - The ID of the user that created the rule. - trigger: :class:`AutoModTrigger` - The rule's trigger. - enabled: :class:`bool` - Whether the rule is enabled. - exempt_role_ids: Set[:class:`int`] - The IDs of the roles that are exempt from the rule. - exempt_channel_ids: Set[:class:`int`] - The IDs of the channels that are exempt from the rule. - """ - - __slots__ = ( - '_state', - '_cs_exempt_roles', - '_cs_exempt_channels', - '_cs_actions', - 'id', - 'guild', - 'name', - 'creator_id', - 'event_type', - 'trigger', - 'enabled', - 'exempt_role_ids', - 'exempt_channel_ids', - '_actions', - ) - - def __init__(self, *, data: AutoModerationRulePayload, guild: Guild, state: ConnectionState) -> None: - self._state: ConnectionState = state - self.guild: Guild = guild - self.id: int = int(data['id']) - self.name: str = data['name'] - self.creator_id = int(data['creator_id']) - self.event_type: AutoModRuleEventType = try_enum(AutoModRuleEventType, data['event_type']) - self.trigger: AutoModTrigger = AutoModTrigger.from_data(data['trigger_type'], data=data.get('trigger_metadata')) - self.enabled: bool = data['enabled'] - self.exempt_role_ids: Set[int] = {int(role_id) for role_id in data['exempt_roles']} - self.exempt_channel_ids: Set[int] = {int(channel_id) for channel_id in data['exempt_channels']} - self._actions: List[AutoModerationActionPayload] = data['actions'] - - def __repr__(self) -> str: - return f'' - - def to_dict(self) -> AutoModerationRulePayload: - ret: AutoModerationRulePayload = { - 'id': str(self.id), - 'guild_id': str(self.guild.id), - 'name': self.name, - 'creator_id': str(self.creator_id), - 'event_type': self.event_type.value, - 'trigger_type': self.trigger.type.value, - 'trigger_metadata': self.trigger.to_metadata_dict(), - 'actions': [action.to_dict() for action in self.actions], - 'enabled': self.enabled, - 'exempt_roles': [str(role_id) for role_id in self.exempt_role_ids], - 'exempt_channels': [str(channel_id) for channel_id in self.exempt_channel_ids], - } # type: ignore # trigger types break the flow here. - - return ret - - @property - def creator(self) -> Optional[Member]: - """Optional[:class:`Member`]: The member that created this rule.""" - return self.guild.get_member(self.creator_id) - - @cached_slot_property('_cs_exempt_roles') - def exempt_roles(self) -> List[Role]: - """List[:class:`Role`]: The roles that are exempt from this rule.""" - result = [] - get_role = self.guild.get_role - for role_id in self.exempt_role_ids: - role = get_role(role_id) - if role is not None: - result.append(role) - - return utils._unique(result) - - @cached_slot_property('_cs_exempt_channels') - def exempt_channels(self) -> List[Union[GuildChannel, Thread]]: - """List[Union[:class:`abc.GuildChannel`, :class:`Thread`]]: The channels that are exempt from this rule.""" - it = filter(None, map(self.guild._resolve_channel, self.exempt_channel_ids)) - return utils._unique(it) - - @cached_slot_property('_cs_actions') - def actions(self) -> List[AutoModRuleAction]: - """List[:class:`AutoModRuleAction`]: The actions that are taken when this rule is triggered.""" - return [AutoModRuleAction.from_data(action) for action in self._actions] - - def is_exempt(self, obj: Snowflake, /) -> bool: - """Check if an object is exempt from the automod rule. 
- - Parameters - ----------- - obj: :class:`abc.Snowflake` - The role, channel, or thread to check. - - Returns - -------- - :class:`bool` - Whether the object is exempt from the automod rule. - """ - return obj.id in self.exempt_channel_ids or obj.id in self.exempt_role_ids - - async def edit( - self, - *, - name: str = MISSING, - event_type: AutoModRuleEventType = MISSING, - actions: List[AutoModRuleAction] = MISSING, - trigger: AutoModTrigger = MISSING, - enabled: bool = MISSING, - exempt_roles: Sequence[Snowflake] = MISSING, - exempt_channels: Sequence[Snowflake] = MISSING, - reason: str = MISSING, - ) -> Self: - """|coro| - - Edits this auto moderation rule. - - You must have :attr:`Permissions.manage_guild` to edit rules. - - Parameters - ----------- - name: :class:`str` - The new name to change to. - event_type: :class:`AutoModRuleEventType` - The new event type to change to. - actions: List[:class:`AutoModRuleAction`] - The new rule actions to update. - trigger: :class:`AutoModTrigger` - The new trigger to update. - You can only change the trigger metadata, not the type. - enabled: :class:`bool` - Whether the rule should be enabled or not. - exempt_roles: Sequence[:class:`abc.Snowflake`] - The new roles to exempt from the rule. - exempt_channels: Sequence[:class:`abc.Snowflake`] - The new channels to exempt from the rule. - reason: :class:`str` - The reason for updating this rule. Shows up on the audit log. - - Raises - ------- - Forbidden - You do not have permission to edit this rule. - HTTPException - Editing the rule failed. - - Returns - -------- - :class:`AutoModRule` - The updated auto moderation rule. - """ - payload = {} - if actions is not MISSING: - payload['actions'] = [action.to_dict() for action in actions] - - if name is not MISSING: - payload['name'] = name - - if event_type is not MISSING: - payload['event_type'] = event_type - - if trigger is not MISSING: - trigger_metadata = trigger.to_metadata_dict() - if trigger_metadata is not None: - payload['trigger_metadata'] = trigger_metadata - - if enabled is not MISSING: - payload['enabled'] = enabled - - if exempt_roles is not MISSING: - payload['exempt_roles'] = [x.id for x in exempt_roles] - - if exempt_channels is not MISSING: - payload['exempt_channels'] = [x.id for x in exempt_channels] - - data = await self._state.http.edit_auto_moderation_rule( - self.guild.id, - self.id, - reason=reason, - **payload, - ) - - return AutoModRule(data=data, guild=self.guild, state=self._state) - - async def delete(self, *, reason: str = MISSING) -> None: - """|coro| - - Deletes the auto moderation rule. - - You must have :attr:`Permissions.manage_guild` to delete rules. - - Parameters - ----------- - reason: :class:`str` - The reason for deleting this rule. Shows up on the audit log. - - Raises - ------- - Forbidden - You do not have permissions to delete the rule. - HTTPException - Deleting the rule failed. - """ - await self._state.http.delete_auto_moderation_rule(self.guild.id, self.id, reason=reason) - - -class AutoModAction: - """Represents an action that was taken as the result of a moderation rule. - - .. versionadded:: 2.0 - - Attributes - ----------- - action: :class:`AutoModRuleAction` - The action that was taken. - message_id: Optional[:class:`int`] - The message ID that triggered the action. This is only available if the - action is done on an edited message. - rule_id: :class:`int` - The ID of the rule that was triggered. 
- rule_trigger_type: :class:`AutoModRuleTriggerType` - The trigger type of the rule that was triggered. - guild_id: :class:`int` - The ID of the guild where the rule was triggered. - user_id: :class:`int` - The ID of the user that triggered the rule. - channel_id: :class:`int` - The ID of the channel where the rule was triggered. - alert_system_message_id: Optional[:class:`int`] - The ID of the system message that was sent to the predefined alert channel. - content: :class:`str` - The content of the message that triggered the rule. - Requires the :attr:`Intents.message_content` or it will always return an empty string. - matched_keyword: Optional[:class:`str`] - The matched keyword from the triggering message. - matched_content: Optional[:class:`str`] - The matched content from the triggering message. - Requires the :attr:`Intents.message_content` or it will always return ``None``. - """ - - __slots__ = ( - '_state', - 'action', - 'rule_id', - 'rule_trigger_type', - 'guild_id', - 'user_id', - 'channel_id', - 'message_id', - 'alert_system_message_id', - 'content', - 'matched_keyword', - 'matched_content', - ) - - def __init__(self, *, data: AutoModerationActionExecutionPayload, state: ConnectionState) -> None: - self._state: ConnectionState = state - self.message_id: Optional[int] = utils._get_as_snowflake(data, 'message_id') - self.action: AutoModRuleAction = AutoModRuleAction.from_data(data['action']) - self.rule_id: int = int(data['rule_id']) - self.rule_trigger_type: AutoModRuleTriggerType = try_enum(AutoModRuleTriggerType, data['rule_trigger_type']) - self.guild_id: int = int(data['guild_id']) - self.channel_id: Optional[int] = utils._get_as_snowflake(data, 'channel_id') - self.user_id: int = int(data['user_id']) - self.alert_system_message_id: Optional[int] = utils._get_as_snowflake(data, 'alert_system_message_id') - self.content: str = data.get('content', '') - self.matched_keyword: Optional[str] = data['matched_keyword'] - self.matched_content: Optional[str] = data.get('matched_content') - - def __repr__(self) -> str: - return f'' - - @property - def guild(self) -> Guild: - """:class:`Guild`: The guild this action was taken in.""" - return self._state._get_or_create_unavailable_guild(self.guild_id) - - @property - def channel(self) -> Optional[Union[GuildChannel, Thread]]: - """Optional[Union[:class:`abc.GuildChannel`, :class:`Thread`]]: The channel this action was taken in.""" - if self.channel_id: - return self.guild.get_channel_or_thread(self.channel_id) - return None - - @property - def member(self) -> Optional[Member]: - """Optional[:class:`Member`]: The member this action was taken against /who triggered this rule.""" - return self.guild.get_member(self.user_id) - - async def fetch_rule(self) -> AutoModRule: - """|coro| - - Fetch the rule whose action was taken. - - You must have :attr:`Permissions.manage_guild` to do this. - - Raises - ------- - Forbidden - You do not have permissions to view the rule. - HTTPException - Fetching the rule failed. - - Returns - -------- - :class:`AutoModRule` - The rule that was executed. 
- """ - - data = await self._state.http.get_auto_moderation_rule(self.guild.id, self.rule_id) - return AutoModRule(data=data, guild=self.guild, state=self._state) diff --git a/.venv/Lib/site-packages/discord/backoff.py b/.venv/Lib/site-packages/discord/backoff.py deleted file mode 100644 index cfb93ad..0000000 --- a/.venv/Lib/site-packages/discord/backoff.py +++ /dev/null @@ -1,108 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - - -import time -import random -from typing import Callable, Generic, Literal, TypeVar, overload, Union - -T = TypeVar('T', bool, Literal[True], Literal[False]) - -# fmt: off -__all__ = ( - 'ExponentialBackoff', -) -# fmt: on - - -class ExponentialBackoff(Generic[T]): - """An implementation of the exponential backoff algorithm - - Provides a convenient interface to implement an exponential backoff - for reconnecting or retrying transmissions in a distributed network. - - Once instantiated, the delay method will return the next interval to - wait for when retrying a connection or transmission. The maximum - delay increases exponentially with each retry up to a maximum of - 2^10 * base, and is reset if no more attempts are needed in a period - of 2^11 * base seconds. - - Parameters - ---------- - base: :class:`int` - The base delay in seconds. The first retry-delay will be up to - this many seconds. - integral: :class:`bool` - Set to ``True`` if whole periods of base is desirable, otherwise any - number in between may be returned. - """ - - def __init__(self, base: int = 1, *, integral: T = False): - self._base: int = base - - self._exp: int = 0 - self._max: int = 10 - self._reset_time: int = base * 2**11 - self._last_invocation: float = time.monotonic() - - # Use our own random instance to avoid messing with global one - rand = random.Random() - rand.seed() - - self._randfunc: Callable[..., Union[int, float]] = rand.randrange if integral else rand.uniform - - @overload - def delay(self: ExponentialBackoff[Literal[False]]) -> float: - ... - - @overload - def delay(self: ExponentialBackoff[Literal[True]]) -> int: - ... - - @overload - def delay(self: ExponentialBackoff[bool]) -> Union[int, float]: - ... - - def delay(self) -> Union[int, float]: - """Compute the next delay - - Returns the next delay to wait according to the exponential - backoff algorithm. 
This is a value between 0 and base * 2^exp - where exponent starts off at 1 and is incremented at every - invocation of this method up to a maximum of 10. - - If a period of more than base * 2^11 has passed since the last - retry, the exponent is reset to 1. - """ - invocation = time.monotonic() - interval = invocation - self._last_invocation - self._last_invocation = invocation - - if interval > self._reset_time: - self._exp = 0 - - self._exp = min(self._exp + 1, self._max) - return self._randfunc(0, self._base * 2**self._exp) diff --git a/.venv/Lib/site-packages/discord/bin/libopus-0.x64.dll b/.venv/Lib/site-packages/discord/bin/libopus-0.x64.dll deleted file mode 100644 index 74a8e35..0000000 Binary files a/.venv/Lib/site-packages/discord/bin/libopus-0.x64.dll and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/bin/libopus-0.x86.dll b/.venv/Lib/site-packages/discord/bin/libopus-0.x86.dll deleted file mode 100644 index ee71317..0000000 Binary files a/.venv/Lib/site-packages/discord/bin/libopus-0.x86.dll and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/channel.py b/.venv/Lib/site-packages/discord/channel.py deleted file mode 100644 index 8c212c0..0000000 --- a/.venv/Lib/site-packages/discord/channel.py +++ /dev/null @@ -1,3332 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import ( - Any, - AsyncIterator, - Callable, - Dict, - Iterable, - List, - Literal, - Mapping, - NamedTuple, - Optional, - TYPE_CHECKING, - Sequence, - Tuple, - TypeVar, - Union, - overload, -) -import datetime - -import discord.abc -from .scheduled_event import ScheduledEvent -from .permissions import PermissionOverwrite, Permissions -from .enums import ChannelType, ForumLayoutType, ForumOrderType, PrivacyLevel, try_enum, VideoQualityMode, EntityType -from .mixins import Hashable -from . 
import utils -from .utils import MISSING -from .asset import Asset -from .errors import ClientException -from .stage_instance import StageInstance -from .threads import Thread -from .partial_emoji import _EmojiTag, PartialEmoji -from .flags import ChannelFlags -from .http import handle_message_parameters - -__all__ = ( - 'TextChannel', - 'VoiceChannel', - 'StageChannel', - 'DMChannel', - 'CategoryChannel', - 'ForumTag', - 'ForumChannel', - 'GroupChannel', - 'PartialMessageable', -) - -if TYPE_CHECKING: - from typing_extensions import Self - - from .types.threads import ThreadArchiveDuration - from .role import Role - from .object import Object - from .member import Member, VoiceState - from .abc import Snowflake, SnowflakeTime - from .embeds import Embed - from .message import Message, PartialMessage, EmojiInputType - from .mentions import AllowedMentions - from .webhook import Webhook - from .state import ConnectionState - from .sticker import GuildSticker, StickerItem - from .file import File - from .user import ClientUser, User, BaseUser - from .guild import Guild, GuildChannel as GuildChannelType - from .ui.view import View - from .types.channel import ( - TextChannel as TextChannelPayload, - NewsChannel as NewsChannelPayload, - VoiceChannel as VoiceChannelPayload, - StageChannel as StageChannelPayload, - DMChannel as DMChannelPayload, - CategoryChannel as CategoryChannelPayload, - GroupDMChannel as GroupChannelPayload, - ForumChannel as ForumChannelPayload, - ForumTag as ForumTagPayload, - ) - from .types.snowflake import SnowflakeList - - OverwriteKeyT = TypeVar('OverwriteKeyT', Role, BaseUser, Object, Union[Role, Member, Object]) - - -class ThreadWithMessage(NamedTuple): - thread: Thread - message: Message - - -class TextChannel(discord.abc.Messageable, discord.abc.GuildChannel, Hashable): - """Represents a Discord guild text channel. - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the channel's hash. - - .. describe:: str(x) - - Returns the channel's name. - - Attributes - ----------- - name: :class:`str` - The channel name. - guild: :class:`Guild` - The guild the channel belongs to. - id: :class:`int` - The channel ID. - category_id: Optional[:class:`int`] - The category channel ID this channel belongs to, if applicable. - topic: Optional[:class:`str`] - The channel's topic. ``None`` if it doesn't exist. - position: :class:`int` - The position in the channel list. This is a number that starts at 0. e.g. the - top channel is position 0. - last_message_id: Optional[:class:`int`] - The last message ID of the message sent to this channel. It may - *not* point to an existing or valid message. - slowmode_delay: :class:`int` - The number of seconds a member must wait between sending messages - in this channel. A value of ``0`` denotes that it is disabled. - Bots and users with :attr:`~Permissions.manage_channels` or - :attr:`~Permissions.manage_messages` bypass slowmode. - nsfw: :class:`bool` - If the channel is marked as "not safe for work" or "age restricted". - default_auto_archive_duration: :class:`int` - The default auto archive duration in minutes for threads created in this channel. - - .. versionadded:: 2.0 - default_thread_slowmode_delay: :class:`int` - The default slowmode delay in seconds for threads created in this channel. - - .. 
versionadded:: 2.3 - """ - - __slots__ = ( - 'name', - 'id', - 'guild', - 'topic', - '_state', - 'nsfw', - 'category_id', - 'position', - 'slowmode_delay', - '_overwrites', - '_type', - 'last_message_id', - 'default_auto_archive_duration', - 'default_thread_slowmode_delay', - ) - - def __init__(self, *, state: ConnectionState, guild: Guild, data: Union[TextChannelPayload, NewsChannelPayload]): - self._state: ConnectionState = state - self.id: int = int(data['id']) - self._type: Literal[0, 5] = data['type'] - self._update(guild, data) - - def __repr__(self) -> str: - attrs = [ - ('id', self.id), - ('name', self.name), - ('position', self.position), - ('nsfw', self.nsfw), - ('news', self.is_news()), - ('category_id', self.category_id), - ] - joined = ' '.join('%s=%r' % t for t in attrs) - return f'<{self.__class__.__name__} {joined}>' - - def _update(self, guild: Guild, data: Union[TextChannelPayload, NewsChannelPayload]) -> None: - self.guild: Guild = guild - self.name: str = data['name'] - self.category_id: Optional[int] = utils._get_as_snowflake(data, 'parent_id') - self.topic: Optional[str] = data.get('topic') - self.position: int = data['position'] - self.nsfw: bool = data.get('nsfw', False) - # Does this need coercion into `int`? No idea yet. - self.slowmode_delay: int = data.get('rate_limit_per_user', 0) - self.default_auto_archive_duration: ThreadArchiveDuration = data.get('default_auto_archive_duration', 1440) - self.default_thread_slowmode_delay: int = data.get('default_thread_rate_limit_per_user', 0) - self._type: Literal[0, 5] = data.get('type', self._type) - self.last_message_id: Optional[int] = utils._get_as_snowflake(data, 'last_message_id') - self._fill_overwrites(data) - - async def _get_channel(self) -> Self: - return self - - @property - def type(self) -> Literal[ChannelType.text, ChannelType.news]: - """:class:`ChannelType`: The channel's Discord type.""" - if self._type == 0: - return ChannelType.text - return ChannelType.news - - @property - def _sorting_bucket(self) -> int: - return ChannelType.text.value - - @property - def _scheduled_event_entity_type(self) -> Optional[EntityType]: - return None - - @utils.copy_doc(discord.abc.GuildChannel.permissions_for) - def permissions_for(self, obj: Union[Member, Role], /) -> Permissions: - base = super().permissions_for(obj) - self._apply_implicit_permissions(base) - - # text channels do not have voice related permissions - denied = Permissions.voice() - base.value &= ~denied.value - return base - - @property - def members(self) -> List[Member]: - """List[:class:`Member`]: Returns all members that can see this channel.""" - return [m for m in self.guild.members if self.permissions_for(m).read_messages] - - @property - def threads(self) -> List[Thread]: - """List[:class:`Thread`]: Returns all the threads that you can see. - - .. versionadded:: 2.0 - """ - return [thread for thread in self.guild._threads.values() if thread.parent_id == self.id] - - def is_nsfw(self) -> bool: - """:class:`bool`: Checks if the channel is NSFW.""" - return self.nsfw - - def is_news(self) -> bool: - """:class:`bool`: Checks if the channel is a news channel.""" - return self._type == ChannelType.news.value - - @property - def last_message(self) -> Optional[Message]: - """Retrieves the last message from this channel in cache. - - The message might not be valid or point to an existing message. - - .. 
admonition:: Reliable Fetching - :class: helpful - - For a slightly more reliable method of fetching the - last message, consider using either :meth:`history` - or :meth:`fetch_message` with the :attr:`last_message_id` - attribute. - - Returns - --------- - Optional[:class:`Message`] - The last message in this channel or ``None`` if not found. - """ - return self._state._get_message(self.last_message_id) if self.last_message_id else None - - @overload - async def edit(self) -> Optional[TextChannel]: - ... - - @overload - async def edit(self, *, position: int, reason: Optional[str] = ...) -> None: - ... - - @overload - async def edit( - self, - *, - reason: Optional[str] = ..., - name: str = ..., - topic: str = ..., - position: int = ..., - nsfw: bool = ..., - sync_permissions: bool = ..., - category: Optional[CategoryChannel] = ..., - slowmode_delay: int = ..., - default_auto_archive_duration: ThreadArchiveDuration = ..., - default_thread_slowmode_delay: int = ..., - type: ChannelType = ..., - overwrites: Mapping[OverwriteKeyT, PermissionOverwrite] = ..., - ) -> TextChannel: - ... - - async def edit(self, *, reason: Optional[str] = None, **options: Any) -> Optional[TextChannel]: - """|coro| - - Edits the channel. - - You must have :attr:`~Permissions.manage_channels` to do this. - - .. versionchanged:: 1.3 - The ``overwrites`` keyword-only parameter was added. - - .. versionchanged:: 1.4 - The ``type`` keyword-only parameter was added. - - .. versionchanged:: 2.0 - Edits are no longer in-place, the newly edited channel is returned instead. - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` or - :exc:`ValueError` instead of ``InvalidArgument``. - - Parameters - ---------- - name: :class:`str` - The new channel name. - topic: :class:`str` - The new channel's topic. - position: :class:`int` - The new channel's position. - nsfw: :class:`bool` - To mark the channel as NSFW or not. - sync_permissions: :class:`bool` - Whether to sync permissions with the channel's new or pre-existing - category. Defaults to ``False``. - category: Optional[:class:`CategoryChannel`] - The new category for this channel. Can be ``None`` to remove the - category. - slowmode_delay: :class:`int` - Specifies the slowmode rate limit for user in this channel, in seconds. - A value of ``0`` disables slowmode. The maximum value possible is ``21600``. - type: :class:`ChannelType` - Change the type of this text channel. Currently, only conversion between - :attr:`ChannelType.text` and :attr:`ChannelType.news` is supported. This - is only available to guilds that contain ``NEWS`` in :attr:`Guild.features`. - reason: Optional[:class:`str`] - The reason for editing this channel. Shows up on the audit log. - overwrites: :class:`Mapping` - A :class:`Mapping` of target (either a role or a member) to - :class:`PermissionOverwrite` to apply to the channel. - default_auto_archive_duration: :class:`int` - The new default auto archive duration in minutes for threads created in this channel. - Must be one of ``60``, ``1440``, ``4320``, or ``10080``. - - .. versionadded:: 2.0 - default_thread_slowmode_delay: :class:`int` - The new default slowmode delay in seconds for threads created in this channel. - - .. versionadded:: 2.3 - Raises - ------ - ValueError - The new ``position`` is less than 0 or greater than the number of channels. - TypeError - The permission overwrite information is not in proper form. - Forbidden - You do not have permissions to edit the channel. - HTTPException - Editing the channel failed. 
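A minimal usage sketch for the ``edit()`` call documented above, assuming ``channel`` is a :class:`TextChannel` the bot can manage; the name, topic and delay values are placeholders ::

    # Rename the channel, set a topic and a 10 second slowmode.
    # Since 2.0 edits are not in-place; the edited channel is returned.
    channel = await channel.edit(
        name='announcements',
        topic='Server announcements only',
        slowmode_delay=10,
        reason='Tidying up the channel',
    )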
- - Returns - -------- - Optional[:class:`.TextChannel`] - The newly edited text channel. If the edit was only positional - then ``None`` is returned instead. - """ - - payload = await self._edit(options, reason=reason) - if payload is not None: - # the payload will always be the proper channel payload - return self.__class__(state=self._state, guild=self.guild, data=payload) # type: ignore - - @utils.copy_doc(discord.abc.GuildChannel.clone) - async def clone(self, *, name: Optional[str] = None, reason: Optional[str] = None) -> TextChannel: - return await self._clone_impl( - {'topic': self.topic, 'nsfw': self.nsfw, 'rate_limit_per_user': self.slowmode_delay}, name=name, reason=reason - ) - - async def delete_messages(self, messages: Iterable[Snowflake], *, reason: Optional[str] = None) -> None: - """|coro| - - Deletes a list of messages. This is similar to :meth:`Message.delete` - except it bulk deletes multiple messages. - - As a special case, if the number of messages is 0, then nothing - is done. If the number of messages is 1 then single message - delete is done. If it's more than two, then bulk delete is used. - - You cannot bulk delete more than 100 messages or messages that - are older than 14 days old. - - You must have :attr:`~Permissions.manage_messages` to do this. - - .. versionchanged:: 2.0 - - ``messages`` parameter is now positional-only. - - The ``reason`` keyword-only parameter was added. - - Parameters - ----------- - messages: Iterable[:class:`abc.Snowflake`] - An iterable of messages denoting which ones to bulk delete. - reason: Optional[:class:`str`] - The reason for deleting the messages. Shows up on the audit log. - - Raises - ------ - ClientException - The number of messages to delete was more than 100. - Forbidden - You do not have proper permissions to delete the messages. - NotFound - If single delete, then the message was already deleted. - HTTPException - Deleting the messages failed. - """ - if not isinstance(messages, (list, tuple)): - messages = list(messages) - - if len(messages) == 0: - return # do nothing - - if len(messages) == 1: - message_id: int = messages[0].id - await self._state.http.delete_message(self.id, message_id) - return - - if len(messages) > 100: - raise ClientException('Can only bulk delete messages up to 100 messages') - - message_ids: SnowflakeList = [m.id for m in messages] - await self._state.http.delete_messages(self.id, message_ids, reason=reason) - - async def purge( - self, - *, - limit: Optional[int] = 100, - check: Callable[[Message], bool] = MISSING, - before: Optional[SnowflakeTime] = None, - after: Optional[SnowflakeTime] = None, - around: Optional[SnowflakeTime] = None, - oldest_first: Optional[bool] = None, - bulk: bool = True, - reason: Optional[str] = None, - ) -> List[Message]: - """|coro| - - Purges a list of messages that meet the criteria given by the predicate - ``check``. If a ``check`` is not provided then all messages are deleted - without discrimination. - - You must have :attr:`~Permissions.manage_messages` to - delete messages even if they are your own. - Having :attr:`~Permissions.read_message_history` is - also needed to retrieve message history. - - .. versionchanged:: 2.0 - - The ``reason`` keyword-only parameter was added. 
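A short sketch of ``delete_messages()`` in practice, assuming ``channel`` is a :class:`TextChannel`, the bot has ``manage_messages``, and the fetched messages are under 14 days old ::

    # Collect the 50 most recent messages and bulk delete them in one call.
    recent = [message async for message in channel.history(limit=50)]
    await channel.delete_messages(recent, reason='Spring cleaning')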
- - Examples - --------- - - Deleting bot's messages :: - - def is_me(m): - return m.author == client.user - - deleted = await channel.purge(limit=100, check=is_me) - await channel.send(f'Deleted {len(deleted)} message(s)') - - Parameters - ----------- - limit: Optional[:class:`int`] - The number of messages to search through. This is not the number - of messages that will be deleted, though it can be. - check: Callable[[:class:`Message`], :class:`bool`] - The function used to check if a message should be deleted. - It must take a :class:`Message` as its sole parameter. - before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Same as ``before`` in :meth:`history`. - after: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Same as ``after`` in :meth:`history`. - around: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Same as ``around`` in :meth:`history`. - oldest_first: Optional[:class:`bool`] - Same as ``oldest_first`` in :meth:`history`. - bulk: :class:`bool` - If ``True``, use bulk delete. Setting this to ``False`` is useful for mass-deleting - a bot's own messages without :attr:`Permissions.manage_messages`. When ``True``, will - fall back to single delete if messages are older than two weeks. - reason: Optional[:class:`str`] - The reason for purging the messages. Shows up on the audit log. - - Raises - ------- - Forbidden - You do not have proper permissions to do the actions required. - HTTPException - Purging the messages failed. - - Returns - -------- - List[:class:`.Message`] - The list of messages that were deleted. - """ - return await discord.abc._purge_helper( - self, - limit=limit, - check=check, - before=before, - after=after, - around=around, - oldest_first=oldest_first, - bulk=bulk, - reason=reason, - ) - - async def webhooks(self) -> List[Webhook]: - """|coro| - - Gets the list of webhooks from this channel. - - You must have :attr:`~.Permissions.manage_webhooks` to do this. - - Raises - ------- - Forbidden - You don't have permissions to get the webhooks. - - Returns - -------- - List[:class:`Webhook`] - The webhooks for this channel. - """ - - from .webhook import Webhook - - data = await self._state.http.channel_webhooks(self.id) - return [Webhook.from_state(d, state=self._state) for d in data] - - async def create_webhook(self, *, name: str, avatar: Optional[bytes] = None, reason: Optional[str] = None) -> Webhook: - """|coro| - - Creates a webhook for this channel. - - You must have :attr:`~.Permissions.manage_webhooks` to do this. - - .. versionchanged:: 1.1 - Added the ``reason`` keyword-only parameter. - - Parameters - ------------- - name: :class:`str` - The webhook's name. - avatar: Optional[:class:`bytes`] - A :term:`py:bytes-like object` representing the webhook's default avatar. - This operates similarly to :meth:`~ClientUser.edit`. - reason: Optional[:class:`str`] - The reason for creating this webhook. Shows up in the audit logs. - - Raises - ------- - HTTPException - Creating the webhook failed. - Forbidden - You do not have permissions to create a webhook. - - Returns - -------- - :class:`Webhook` - The created webhook. 
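A usage sketch for the webhook helpers above; ``channel`` is an assumed :class:`TextChannel` and the webhook name is illustrative ::

    # Create a webhook, then enumerate every webhook on the channel.
    webhook = await channel.create_webhook(name='Release Feed', reason='CI notifications')
    for hook in await channel.webhooks():
        print(hook.id, hook.name)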
- """ - - from .webhook import Webhook - - if avatar is not None: - avatar = utils._bytes_to_base64_data(avatar) # type: ignore # Silence reassignment error - - data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar, reason=reason) - return Webhook.from_state(data, state=self._state) - - async def follow(self, *, destination: TextChannel, reason: Optional[str] = None) -> Webhook: - """|coro| - - Follows a channel using a webhook. - - Only news channels can be followed. - - .. note:: - - The webhook returned will not provide a token to do webhook - actions, as Discord does not provide it. - - .. versionadded:: 1.3 - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` instead of - ``InvalidArgument``. - - Parameters - ----------- - destination: :class:`TextChannel` - The channel you would like to follow from. - reason: Optional[:class:`str`] - The reason for following the channel. Shows up on the destination guild's audit log. - - .. versionadded:: 1.4 - - Raises - ------- - HTTPException - Following the channel failed. - Forbidden - You do not have the permissions to create a webhook. - ClientException - The channel is not a news channel. - TypeError - The destination channel is not a text channel. - - Returns - -------- - :class:`Webhook` - The created webhook. - """ - - if not self.is_news(): - raise ClientException('The channel must be a news channel.') - - if not isinstance(destination, TextChannel): - raise TypeError(f'Expected TextChannel received {destination.__class__.__name__}') - - from .webhook import Webhook - - data = await self._state.http.follow_webhook(self.id, webhook_channel_id=destination.id, reason=reason) - return Webhook._as_follower(data, channel=destination, user=self._state.user) - - def get_partial_message(self, message_id: int, /) -> PartialMessage: - """Creates a :class:`PartialMessage` from the message ID. - - This is useful if you want to work with a message and only have its ID without - doing an unnecessary API call. - - .. versionadded:: 1.6 - - .. versionchanged:: 2.0 - - ``message_id`` parameter is now positional-only. - - Parameters - ------------ - message_id: :class:`int` - The message ID to create a partial message for. - - Returns - --------- - :class:`PartialMessage` - The partial message. - """ - - from .message import PartialMessage - - return PartialMessage(channel=self, id=message_id) - - def get_thread(self, thread_id: int, /) -> Optional[Thread]: - """Returns a thread with the given ID. - - .. note:: - - This does not always retrieve archived threads, as they are not retained in the internal - cache. Use :func:`Guild.fetch_channel` instead. - - .. versionadded:: 2.0 - - Parameters - ----------- - thread_id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[:class:`Thread`] - The returned thread or ``None`` if not found. - """ - return self.guild.get_thread(thread_id) - - async def create_thread( - self, - *, - name: str, - message: Optional[Snowflake] = None, - auto_archive_duration: ThreadArchiveDuration = MISSING, - type: Optional[ChannelType] = None, - reason: Optional[str] = None, - invitable: bool = True, - slowmode_delay: Optional[int] = None, - ) -> Thread: - """|coro| - - Creates a thread in this text channel. - - To create a public thread, you must have :attr:`~discord.Permissions.create_public_threads`. - For a private thread, :attr:`~discord.Permissions.create_private_threads` is needed instead. - - .. 
versionadded:: 2.0 - - Parameters - ----------- - name: :class:`str` - The name of the thread. - message: Optional[:class:`abc.Snowflake`] - A snowflake representing the message to create the thread with. - If ``None`` is passed then a private thread is created. - Defaults to ``None``. - auto_archive_duration: :class:`int` - The duration in minutes before a thread is automatically hidden from the channel list. - If not provided, the channel's default auto archive duration is used. - - Must be one of ``60``, ``1440``, ``4320``, or ``10080``, if provided. - type: Optional[:class:`ChannelType`] - The type of thread to create. If a ``message`` is passed then this parameter - is ignored, as a thread created with a message is always a public thread. - By default this creates a private thread if this is ``None``. - reason: :class:`str` - The reason for creating a new thread. Shows up on the audit log. - invitable: :class:`bool` - Whether non-moderators can add users to the thread. Only applicable to private threads. - Defaults to ``True``. - slowmode_delay: Optional[:class:`int`] - Specifies the slowmode rate limit for user in this channel, in seconds. - The maximum value possible is ``21600``. By default no slowmode rate limit - if this is ``None``. - - Raises - ------- - Forbidden - You do not have permissions to create a thread. - HTTPException - Starting the thread failed. - - Returns - -------- - :class:`Thread` - The created thread - """ - - if type is None: - type = ChannelType.private_thread - - if message is None: - data = await self._state.http.start_thread_without_message( - self.id, - name=name, - auto_archive_duration=auto_archive_duration or self.default_auto_archive_duration, - type=type.value, - reason=reason, - invitable=invitable, - rate_limit_per_user=slowmode_delay, - ) - else: - data = await self._state.http.start_thread_with_message( - self.id, - message.id, - name=name, - auto_archive_duration=auto_archive_duration or self.default_auto_archive_duration, - reason=reason, - rate_limit_per_user=slowmode_delay, - ) - - return Thread(guild=self.guild, state=self._state, data=data) - - async def archived_threads( - self, - *, - private: bool = False, - joined: bool = False, - limit: Optional[int] = 100, - before: Optional[Union[Snowflake, datetime.datetime]] = None, - ) -> AsyncIterator[Thread]: - """Returns an :term:`asynchronous iterator` that iterates over all archived threads in this text channel, - in order of decreasing ID for joined threads, and decreasing :attr:`Thread.archive_timestamp` otherwise. - - You must have :attr:`~Permissions.read_message_history` to do this. If iterating over private threads - then :attr:`~Permissions.manage_threads` is also required. - - .. versionadded:: 2.0 - - Parameters - ----------- - limit: Optional[:class:`bool`] - The number of threads to retrieve. - If ``None``, retrieves every archived thread in the channel. Note, however, - that this would make it a slow operation. - before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Retrieve archived channels before the given date or ID. - private: :class:`bool` - Whether to retrieve private archived threads. - joined: :class:`bool` - Whether to retrieve private archived threads that you've joined. - You cannot set ``joined`` to ``True`` and ``private`` to ``False``. - - Raises - ------ - Forbidden - You do not have permissions to get archived threads. - HTTPException - The request to get the archived threads failed. 
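A sketch of ``create_thread()`` under the parameters described above; ``channel`` and ``some_message`` are placeholders for a text channel and an existing message ::

    # Start a public thread attached to an existing message...
    thread = await channel.create_thread(name='Bug report discussion', message=some_message)

    # ...or, with no message, create a private thread that only moderators can add members to.
    private = await channel.create_thread(
        name='Mod chat',
        auto_archive_duration=1440,
        invitable=False,
    )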
- ValueError - ``joined`` was set to ``True`` and ``private`` was set to ``False``. You cannot retrieve public archived - threads that you have joined. - - Yields - ------- - :class:`Thread` - The archived threads. - """ - if joined and not private: - raise ValueError('Cannot retrieve joined public archived threads') - - before_timestamp = None - - if isinstance(before, datetime.datetime): - if joined: - before_timestamp = str(utils.time_snowflake(before, high=False)) - else: - before_timestamp = before.isoformat() - elif before is not None: - if joined: - before_timestamp = str(before.id) - else: - before_timestamp = utils.snowflake_time(before.id).isoformat() - - update_before = lambda data: data['thread_metadata']['archive_timestamp'] - endpoint = self.guild._state.http.get_public_archived_threads - - if joined: - update_before = lambda data: data['id'] - endpoint = self.guild._state.http.get_joined_private_archived_threads - elif private: - endpoint = self.guild._state.http.get_private_archived_threads - - while True: - retrieve = 100 - if limit is not None: - if limit <= 0: - return - retrieve = max(2, min(retrieve, limit)) - - data = await endpoint(self.id, before=before_timestamp, limit=retrieve) - - threads = data.get('threads', []) - for raw_thread in threads: - yield Thread(guild=self.guild, state=self.guild._state, data=raw_thread) - # Currently the API doesn't let you request less than 2 threads. - # Bail out early if we had to retrieve more than what the limit was. - if limit is not None: - limit -= 1 - if limit <= 0: - return - - if not data.get('has_more', False): - return - - before_timestamp = update_before(threads[-1]) - - -class VocalGuildChannel(discord.abc.Messageable, discord.abc.Connectable, discord.abc.GuildChannel, Hashable): - __slots__ = ( - 'name', - 'id', - 'guild', - 'nsfw', - 'bitrate', - 'user_limit', - '_state', - 'position', - 'slowmode_delay', - '_overwrites', - 'category_id', - 'rtc_region', - 'video_quality_mode', - 'last_message_id', - ) - - def __init__(self, *, state: ConnectionState, guild: Guild, data: Union[VoiceChannelPayload, StageChannelPayload]): - self._state: ConnectionState = state - self.id: int = int(data['id']) - self._update(guild, data) - - async def _get_channel(self) -> Self: - return self - - def _get_voice_client_key(self) -> Tuple[int, str]: - return self.guild.id, 'guild_id' - - def _get_voice_state_pair(self) -> Tuple[int, int]: - return self.guild.id, self.id - - def _update(self, guild: Guild, data: Union[VoiceChannelPayload, StageChannelPayload]) -> None: - self.guild: Guild = guild - self.name: str = data['name'] - self.nsfw: bool = data.get('nsfw', False) - self.rtc_region: Optional[str] = data.get('rtc_region') - self.video_quality_mode: VideoQualityMode = try_enum(VideoQualityMode, data.get('video_quality_mode', 1)) - self.category_id: Optional[int] = utils._get_as_snowflake(data, 'parent_id') - self.last_message_id: Optional[int] = utils._get_as_snowflake(data, 'last_message_id') - self.position: int = data['position'] - self.slowmode_delay = data.get('rate_limit_per_user', 0) - self.bitrate: int = data['bitrate'] - self.user_limit: int = data['user_limit'] - self._fill_overwrites(data) - - @property - def _sorting_bucket(self) -> int: - return ChannelType.voice.value - - def is_nsfw(self) -> bool: - """:class:`bool`: Checks if the channel is NSFW. - - .. 
versionadded:: 2.0 - """ - return self.nsfw - - @property - def members(self) -> List[Member]: - """List[:class:`Member`]: Returns all members that are currently inside this voice channel.""" - ret = [] - for user_id, state in self.guild._voice_states.items(): - if state.channel and state.channel.id == self.id: - member = self.guild.get_member(user_id) - if member is not None: - ret.append(member) - return ret - - @property - def voice_states(self) -> Dict[int, VoiceState]: - """Returns a mapping of member IDs who have voice states in this channel. - - .. versionadded:: 1.3 - - .. note:: - - This function is intentionally low level to replace :attr:`members` - when the member cache is unavailable. - - Returns - -------- - Mapping[:class:`int`, :class:`VoiceState`] - The mapping of member ID to a voice state. - """ - # fmt: off - return { - key: value - for key, value in self.guild._voice_states.items() - if value.channel and value.channel.id == self.id - } - # fmt: on - - @property - def scheduled_events(self) -> List[ScheduledEvent]: - """List[:class:`ScheduledEvent`]: Returns all scheduled events for this channel. - - .. versionadded:: 2.0 - """ - return [event for event in self.guild.scheduled_events if event.channel_id == self.id] - - @utils.copy_doc(discord.abc.GuildChannel.permissions_for) - def permissions_for(self, obj: Union[Member, Role], /) -> Permissions: - base = super().permissions_for(obj) - self._apply_implicit_permissions(base) - - # voice channels cannot be edited by people who can't connect to them - # It also implicitly denies all other voice perms - if not base.connect: - denied = Permissions.voice() - denied.update(manage_channels=True, manage_roles=True) - base.value &= ~denied.value - return base - - @property - def last_message(self) -> Optional[Message]: - """Retrieves the last message from this channel in cache. - - The message might not be valid or point to an existing message. - - .. versionadded:: 2.0 - - .. admonition:: Reliable Fetching - :class: helpful - - For a slightly more reliable method of fetching the - last message, consider using either :meth:`history` - or :meth:`fetch_message` with the :attr:`last_message_id` - attribute. - - Returns - --------- - Optional[:class:`Message`] - The last message in this channel or ``None`` if not found. - """ - return self._state._get_message(self.last_message_id) if self.last_message_id else None - - def get_partial_message(self, message_id: int, /) -> PartialMessage: - """Creates a :class:`PartialMessage` from the message ID. - - This is useful if you want to work with a message and only have its ID without - doing an unnecessary API call. - - .. versionadded:: 2.0 - - Parameters - ------------ - message_id: :class:`int` - The message ID to create a partial message for. - - Returns - --------- - :class:`PartialMessage` - The partial message. - """ - - from .message import PartialMessage - - return PartialMessage(channel=self, id=message_id) # type: ignore # VocalGuildChannel is an impl detail - - async def delete_messages(self, messages: Iterable[Snowflake], *, reason: Optional[str] = None) -> None: - """|coro| - - Deletes a list of messages. This is similar to :meth:`Message.delete` - except it bulk deletes multiple messages. - - As a special case, if the number of messages is 0, then nothing - is done. If the number of messages is 1 then single message - delete is done. If it's more than two, then bulk delete is used. 
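A small sketch of the voice membership helpers above, assuming ``voice_channel`` is a :class:`VoiceChannel` or :class:`StageChannel` from the cache ::

    # Members currently connected, via the member cache.
    for member in voice_channel.members:
        print(member.display_name)

    # voice_states is lower level and works even without the member cache.
    for member_id, state in voice_channel.voice_states.items():
        print(member_id, state.self_mute, state.self_deaf)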
- - You cannot bulk delete more than 100 messages or messages that - are older than 14 days old. - - You must have :attr:`~Permissions.manage_messages` to do this. - - .. versionadded:: 2.0 - - Parameters - ----------- - messages: Iterable[:class:`abc.Snowflake`] - An iterable of messages denoting which ones to bulk delete. - reason: Optional[:class:`str`] - The reason for deleting the messages. Shows up on the audit log. - - Raises - ------ - ClientException - The number of messages to delete was more than 100. - Forbidden - You do not have proper permissions to delete the messages. - NotFound - If single delete, then the message was already deleted. - HTTPException - Deleting the messages failed. - """ - if not isinstance(messages, (list, tuple)): - messages = list(messages) - - if len(messages) == 0: - return # do nothing - - if len(messages) == 1: - message_id: int = messages[0].id - await self._state.http.delete_message(self.id, message_id) - return - - if len(messages) > 100: - raise ClientException('Can only bulk delete messages up to 100 messages') - - message_ids: SnowflakeList = [m.id for m in messages] - await self._state.http.delete_messages(self.id, message_ids, reason=reason) - - async def purge( - self, - *, - limit: Optional[int] = 100, - check: Callable[[Message], bool] = MISSING, - before: Optional[SnowflakeTime] = None, - after: Optional[SnowflakeTime] = None, - around: Optional[SnowflakeTime] = None, - oldest_first: Optional[bool] = None, - bulk: bool = True, - reason: Optional[str] = None, - ) -> List[Message]: - """|coro| - - Purges a list of messages that meet the criteria given by the predicate - ``check``. If a ``check`` is not provided then all messages are deleted - without discrimination. - - You must have :attr:`~Permissions.manage_messages` to - delete messages even if they are your own. - Having :attr:`~Permissions.read_message_history` is - also needed to retrieve message history. - - .. versionadded:: 2.0 - - Examples - --------- - - Deleting bot's messages :: - - def is_me(m): - return m.author == client.user - - deleted = await channel.purge(limit=100, check=is_me) - await channel.send(f'Deleted {len(deleted)} message(s)') - - Parameters - ----------- - limit: Optional[:class:`int`] - The number of messages to search through. This is not the number - of messages that will be deleted, though it can be. - check: Callable[[:class:`Message`], :class:`bool`] - The function used to check if a message should be deleted. - It must take a :class:`Message` as its sole parameter. - before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Same as ``before`` in :meth:`history`. - after: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Same as ``after`` in :meth:`history`. - around: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Same as ``around`` in :meth:`history`. - oldest_first: Optional[:class:`bool`] - Same as ``oldest_first`` in :meth:`history`. - bulk: :class:`bool` - If ``True``, use bulk delete. Setting this to ``False`` is useful for mass-deleting - a bot's own messages without :attr:`Permissions.manage_messages`. When ``True``, will - fall back to single delete if messages are older than two weeks. - reason: Optional[:class:`str`] - The reason for purging the messages. Shows up on the audit log. - - Raises - ------- - Forbidden - You do not have proper permissions to do the actions required. - HTTPException - Purging the messages failed. 
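A sketch of the ``bulk=False`` path mentioned above, which lets a bot delete its own messages one by one without ``manage_messages``; ``client`` and ``voice_channel`` are placeholders for the running client and a cached channel ::

    def is_me(m):
        return m.author == client.user

    # Single deletes instead of bulk delete.
    deleted = await voice_channel.purge(limit=200, check=is_me, bulk=False)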
- - Returns - -------- - List[:class:`.Message`] - The list of messages that were deleted. - """ - - return await discord.abc._purge_helper( - self, - limit=limit, - check=check, - before=before, - after=after, - around=around, - oldest_first=oldest_first, - bulk=bulk, - reason=reason, - ) - - async def webhooks(self) -> List[Webhook]: - """|coro| - - Gets the list of webhooks from this channel. - - You must have :attr:`~.Permissions.manage_webhooks` to do this. - - .. versionadded:: 2.0 - - Raises - ------- - Forbidden - You don't have permissions to get the webhooks. - - Returns - -------- - List[:class:`Webhook`] - The webhooks for this channel. - """ - - from .webhook import Webhook - - data = await self._state.http.channel_webhooks(self.id) - return [Webhook.from_state(d, state=self._state) for d in data] - - async def create_webhook(self, *, name: str, avatar: Optional[bytes] = None, reason: Optional[str] = None) -> Webhook: - """|coro| - - Creates a webhook for this channel. - - You must have :attr:`~.Permissions.manage_webhooks` to do this. - - .. versionadded:: 2.0 - - Parameters - ------------- - name: :class:`str` - The webhook's name. - avatar: Optional[:class:`bytes`] - A :term:`py:bytes-like object` representing the webhook's default avatar. - This operates similarly to :meth:`~ClientUser.edit`. - reason: Optional[:class:`str`] - The reason for creating this webhook. Shows up in the audit logs. - - Raises - ------- - HTTPException - Creating the webhook failed. - Forbidden - You do not have permissions to create a webhook. - - Returns - -------- - :class:`Webhook` - The created webhook. - """ - - from .webhook import Webhook - - if avatar is not None: - avatar = utils._bytes_to_base64_data(avatar) # type: ignore # Silence reassignment error - - data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar, reason=reason) - return Webhook.from_state(data, state=self._state) - - -class VoiceChannel(VocalGuildChannel): - """Represents a Discord guild voice channel. - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the channel's hash. - - .. describe:: str(x) - - Returns the channel's name. - - Attributes - ----------- - name: :class:`str` - The channel name. - guild: :class:`Guild` - The guild the channel belongs to. - id: :class:`int` - The channel ID. - nsfw: :class:`bool` - If the channel is marked as "not safe for work" or "age restricted". - - .. versionadded:: 2.0 - category_id: Optional[:class:`int`] - The category channel ID this channel belongs to, if applicable. - position: :class:`int` - The position in the channel list. This is a number that starts at 0. e.g. the - top channel is position 0. - bitrate: :class:`int` - The channel's preferred audio bitrate in bits per second. - user_limit: :class:`int` - The channel's limit for number of members that can be in a voice channel. - rtc_region: Optional[:class:`str`] - The region for the voice channel's voice communication. - A value of ``None`` indicates automatic voice region detection. - - .. versionadded:: 1.7 - - .. versionchanged:: 2.0 - The type of this attribute has changed to :class:`str`. - video_quality_mode: :class:`VideoQualityMode` - The camera video quality for the voice channel's participants. - - .. versionadded:: 2.0 - last_message_id: Optional[:class:`int`] - The last message ID of the message sent to this channel. 
It may - *not* point to an existing or valid message. - - .. versionadded:: 2.0 - slowmode_delay: :class:`int` - The number of seconds a member must wait between sending messages - in this channel. A value of ``0`` denotes that it is disabled. - Bots and users with :attr:`~Permissions.manage_channels` or - :attr:`~Permissions.manage_messages` bypass slowmode. - - .. versionadded:: 2.2 - """ - - __slots__ = () - - def __repr__(self) -> str: - attrs = [ - ('id', self.id), - ('name', self.name), - ('rtc_region', self.rtc_region), - ('position', self.position), - ('bitrate', self.bitrate), - ('video_quality_mode', self.video_quality_mode), - ('user_limit', self.user_limit), - ('category_id', self.category_id), - ] - joined = ' '.join('%s=%r' % t for t in attrs) - return f'<{self.__class__.__name__} {joined}>' - - @property - def _scheduled_event_entity_type(self) -> Optional[EntityType]: - return EntityType.voice - - @property - def type(self) -> Literal[ChannelType.voice]: - """:class:`ChannelType`: The channel's Discord type.""" - return ChannelType.voice - - @utils.copy_doc(discord.abc.GuildChannel.clone) - async def clone(self, *, name: Optional[str] = None, reason: Optional[str] = None) -> VoiceChannel: - return await self._clone_impl({'bitrate': self.bitrate, 'user_limit': self.user_limit}, name=name, reason=reason) - - @overload - async def edit(self) -> None: - ... - - @overload - async def edit(self, *, position: int, reason: Optional[str] = ...) -> None: - ... - - @overload - async def edit( - self, - *, - name: str = ..., - nsfw: bool = ..., - bitrate: int = ..., - user_limit: int = ..., - position: int = ..., - sync_permissions: int = ..., - category: Optional[CategoryChannel] = ..., - overwrites: Mapping[OverwriteKeyT, PermissionOverwrite] = ..., - rtc_region: Optional[str] = ..., - video_quality_mode: VideoQualityMode = ..., - slowmode_delay: int = ..., - reason: Optional[str] = ..., - ) -> VoiceChannel: - ... - - async def edit(self, *, reason: Optional[str] = None, **options: Any) -> Optional[VoiceChannel]: - """|coro| - - Edits the channel. - - You must have :attr:`~Permissions.manage_channels` to do this. - - .. versionchanged:: 1.3 - The ``overwrites`` keyword-only parameter was added. - - .. versionchanged:: 2.0 - Edits are no longer in-place, the newly edited channel is returned instead. - - .. versionchanged:: 2.0 - The ``region`` parameter now accepts :class:`str` instead of an enum. - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` instead of - ``InvalidArgument``. - - Parameters - ---------- - name: :class:`str` - The new channel's name. - bitrate: :class:`int` - The new channel's bitrate. - nsfw: :class:`bool` - To mark the channel as NSFW or not. - user_limit: :class:`int` - The new channel's user limit. - position: :class:`int` - The new channel's position. - sync_permissions: :class:`bool` - Whether to sync permissions with the channel's new or pre-existing - category. Defaults to ``False``. - category: Optional[:class:`CategoryChannel`] - The new category for this channel. Can be ``None`` to remove the - category. - slowmode_delay: :class:`int` - Specifies the slowmode rate limit for user in this channel, in seconds. - A value of ``0`` disables slowmode. The maximum value possible is ``21600``. - reason: Optional[:class:`str`] - The reason for editing this channel. Shows up on the audit log. - overwrites: :class:`Mapping` - A :class:`Mapping` of target (either a role or a member) to - :class:`PermissionOverwrite` to apply to the channel. 
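A minimal ``edit()`` sketch for voice channels, assuming ``voice_channel`` is editable by the bot; the numbers are illustrative ::

    # Raise the bitrate, cap the channel at 10 members and use automatic region selection.
    voice_channel = await voice_channel.edit(
        bitrate=96000,
        user_limit=10,
        rtc_region=None,
        reason='Movie night setup',
    )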
- rtc_region: Optional[:class:`str`] - The new region for the voice channel's voice communication. - A value of ``None`` indicates automatic voice region detection. - - .. versionadded:: 1.7 - video_quality_mode: :class:`VideoQualityMode` - The camera video quality for the voice channel's participants. - - .. versionadded:: 2.0 - - Raises - ------ - TypeError - If the permission overwrite information is not in proper form. - Forbidden - You do not have permissions to edit the channel. - HTTPException - Editing the channel failed. - - Returns - -------- - Optional[:class:`.VoiceChannel`] - The newly edited voice channel. If the edit was only positional - then ``None`` is returned instead. - """ - payload = await self._edit(options, reason=reason) - if payload is not None: - # the payload will always be the proper channel payload - return self.__class__(state=self._state, guild=self.guild, data=payload) # type: ignore - - -class StageChannel(VocalGuildChannel): - """Represents a Discord guild stage channel. - - .. versionadded:: 1.7 - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the channel's hash. - - .. describe:: str(x) - - Returns the channel's name. - - Attributes - ----------- - name: :class:`str` - The channel name. - guild: :class:`Guild` - The guild the channel belongs to. - id: :class:`int` - The channel ID. - nsfw: :class:`bool` - If the channel is marked as "not safe for work" or "age restricted". - - .. versionadded:: 2.0 - topic: Optional[:class:`str`] - The channel's topic. ``None`` if it isn't set. - category_id: Optional[:class:`int`] - The category channel ID this channel belongs to, if applicable. - position: :class:`int` - The position in the channel list. This is a number that starts at 0. e.g. the - top channel is position 0. - bitrate: :class:`int` - The channel's preferred audio bitrate in bits per second. - user_limit: :class:`int` - The channel's limit for number of members that can be in a stage channel. - rtc_region: Optional[:class:`str`] - The region for the stage channel's voice communication. - A value of ``None`` indicates automatic voice region detection. - video_quality_mode: :class:`VideoQualityMode` - The camera video quality for the stage channel's participants. - - .. versionadded:: 2.0 - last_message_id: Optional[:class:`int`] - The last message ID of the message sent to this channel. It may - *not* point to an existing or valid message. - - .. versionadded:: 2.2 - slowmode_delay: :class:`int` - The number of seconds a member must wait between sending messages - in this channel. A value of ``0`` denotes that it is disabled. - Bots and users with :attr:`~Permissions.manage_channels` or - :attr:`~Permissions.manage_messages` bypass slowmode. - - .. 
versionadded:: 2.2 - """ - - __slots__ = ('topic',) - - def __repr__(self) -> str: - attrs = [ - ('id', self.id), - ('name', self.name), - ('topic', self.topic), - ('rtc_region', self.rtc_region), - ('position', self.position), - ('bitrate', self.bitrate), - ('video_quality_mode', self.video_quality_mode), - ('user_limit', self.user_limit), - ('category_id', self.category_id), - ] - joined = ' '.join('%s=%r' % t for t in attrs) - return f'<{self.__class__.__name__} {joined}>' - - def _update(self, guild: Guild, data: StageChannelPayload) -> None: - super()._update(guild, data) - self.topic: Optional[str] = data.get('topic') - - @property - def _scheduled_event_entity_type(self) -> Optional[EntityType]: - return EntityType.stage_instance - - @property - def requesting_to_speak(self) -> List[Member]: - """List[:class:`Member`]: A list of members who are requesting to speak in the stage channel.""" - return [member for member in self.members if member.voice and member.voice.requested_to_speak_at is not None] - - @property - def speakers(self) -> List[Member]: - """List[:class:`Member`]: A list of members who have been permitted to speak in the stage channel. - - .. versionadded:: 2.0 - """ - return [ - member - for member in self.members - if member.voice and not member.voice.suppress and member.voice.requested_to_speak_at is None - ] - - @property - def listeners(self) -> List[Member]: - """List[:class:`Member`]: A list of members who are listening in the stage channel. - - .. versionadded:: 2.0 - """ - return [member for member in self.members if member.voice and member.voice.suppress] - - @property - def moderators(self) -> List[Member]: - """List[:class:`Member`]: A list of members who are moderating the stage channel. - - .. versionadded:: 2.0 - """ - required_permissions = Permissions.stage_moderator() - return [member for member in self.members if self.permissions_for(member) >= required_permissions] - - @property - def type(self) -> Literal[ChannelType.stage_voice]: - """:class:`ChannelType`: The channel's Discord type.""" - return ChannelType.stage_voice - - @utils.copy_doc(discord.abc.GuildChannel.clone) - async def clone(self, *, name: Optional[str] = None, reason: Optional[str] = None) -> StageChannel: - return await self._clone_impl({}, name=name, reason=reason) - - @property - def instance(self) -> Optional[StageInstance]: - """Optional[:class:`StageInstance`]: The running stage instance of the stage channel. - - .. versionadded:: 2.0 - """ - return utils.get(self.guild.stage_instances, channel_id=self.id) - - async def create_instance( - self, - *, - topic: str, - privacy_level: PrivacyLevel = MISSING, - send_start_notification: bool = False, - reason: Optional[str] = None, - ) -> StageInstance: - """|coro| - - Create a stage instance. - - You must have :attr:`~Permissions.manage_channels` to do this. - - .. versionadded:: 2.0 - - Parameters - ----------- - topic: :class:`str` - The stage instance's topic. - privacy_level: :class:`PrivacyLevel` - The stage instance's privacy level. Defaults to :attr:`PrivacyLevel.guild_only`. - send_start_notification: :class:`bool` - Whether to send a start notification. This sends a push notification to @everyone if ``True``. Defaults to ``False``. - You must have :attr:`~Permissions.mention_everyone` to do this. - - .. versionadded:: 2.3 - reason: :class:`str` - The reason the stage instance was created. Shows up on the audit log. - - Raises - ------ - TypeError - If the ``privacy_level`` parameter is not the proper type. 
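A sketch of opening a stage and inspecting its speakers, assuming ``stage_channel`` is a :class:`StageChannel` the bot can manage; topic and reason are placeholders ::

    # Go live on the stage channel.
    instance = await stage_channel.create_instance(
        topic='Community Q&A',
        send_start_notification=False,
        reason='Weekly event',
    )

    # Members currently permitted to speak.
    for member in stage_channel.speakers:
        print(member.display_name)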
- Forbidden - You do not have permissions to create a stage instance. - HTTPException - Creating a stage instance failed. - - Returns - -------- - :class:`StageInstance` - The newly created stage instance. - """ - - payload: Dict[str, Any] = {'channel_id': self.id, 'topic': topic} - - if privacy_level is not MISSING: - if not isinstance(privacy_level, PrivacyLevel): - raise TypeError('privacy_level field must be of type PrivacyLevel') - - payload['privacy_level'] = privacy_level.value - - payload['send_start_notification'] = send_start_notification - - data = await self._state.http.create_stage_instance(**payload, reason=reason) - return StageInstance(guild=self.guild, state=self._state, data=data) - - async def fetch_instance(self) -> StageInstance: - """|coro| - - Gets the running :class:`StageInstance`. - - .. versionadded:: 2.0 - - Raises - ------- - NotFound - The stage instance or channel could not be found. - HTTPException - Getting the stage instance failed. - - Returns - -------- - :class:`StageInstance` - The stage instance. - """ - data = await self._state.http.get_stage_instance(self.id) - return StageInstance(guild=self.guild, state=self._state, data=data) - - @overload - async def edit(self) -> None: - ... - - @overload - async def edit(self, *, position: int, reason: Optional[str] = ...) -> None: - ... - - @overload - async def edit( - self, - *, - name: str = ..., - nsfw: bool = ..., - user_limit: int = ..., - position: int = ..., - sync_permissions: int = ..., - category: Optional[CategoryChannel] = ..., - overwrites: Mapping[OverwriteKeyT, PermissionOverwrite] = ..., - rtc_region: Optional[str] = ..., - video_quality_mode: VideoQualityMode = ..., - slowmode_delay: int = ..., - reason: Optional[str] = ..., - ) -> StageChannel: - ... - - async def edit(self, *, reason: Optional[str] = None, **options: Any) -> Optional[StageChannel]: - """|coro| - - Edits the channel. - - You must have :attr:`~Permissions.manage_channels` to do this. - - .. versionchanged:: 2.0 - The ``topic`` parameter must now be set via :attr:`create_instance`. - - .. versionchanged:: 2.0 - Edits are no longer in-place, the newly edited channel is returned instead. - - .. versionchanged:: 2.0 - The ``region`` parameter now accepts :class:`str` instead of an enum. - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` instead of - ``InvalidArgument``. - - Parameters - ---------- - name: :class:`str` - The new channel's name. - position: :class:`int` - The new channel's position. - nsfw: :class:`bool` - To mark the channel as NSFW or not. - user_limit: :class:`int` - The new channel's user limit. - sync_permissions: :class:`bool` - Whether to sync permissions with the channel's new or pre-existing - category. Defaults to ``False``. - category: Optional[:class:`CategoryChannel`] - The new category for this channel. Can be ``None`` to remove the - category. - slowmode_delay: :class:`int` - Specifies the slowmode rate limit for user in this channel, in seconds. - A value of ``0`` disables slowmode. The maximum value possible is ``21600``. - reason: Optional[:class:`str`] - The reason for editing this channel. Shows up on the audit log. - overwrites: :class:`Mapping` - A :class:`Mapping` of target (either a role or a member) to - :class:`PermissionOverwrite` to apply to the channel. - rtc_region: Optional[:class:`str`] - The new region for the stage channel's voice communication. - A value of ``None`` indicates automatic voice region detection. 
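A sketch of retrieving the running stage instance, preferring the cached ``instance`` property and falling back to ``fetch_instance()``; ``stage_channel`` is assumed as above ::

    instance = stage_channel.instance
    if instance is None:
        instance = await stage_channel.fetch_instance()
    print(instance.topic)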
- video_quality_mode: :class:`VideoQualityMode` - The camera video quality for the stage channel's participants. - - .. versionadded:: 2.0 - - Raises - ------ - ValueError - If the permission overwrite information is not in proper form. - Forbidden - You do not have permissions to edit the channel. - HTTPException - Editing the channel failed. - - Returns - -------- - Optional[:class:`.StageChannel`] - The newly edited stage channel. If the edit was only positional - then ``None`` is returned instead. - """ - - payload = await self._edit(options, reason=reason) - if payload is not None: - # the payload will always be the proper channel payload - return self.__class__(state=self._state, guild=self.guild, data=payload) # type: ignore - - -class CategoryChannel(discord.abc.GuildChannel, Hashable): - """Represents a Discord channel category. - - These are useful to group channels to logical compartments. - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the category's hash. - - .. describe:: str(x) - - Returns the category's name. - - Attributes - ----------- - name: :class:`str` - The category name. - guild: :class:`Guild` - The guild the category belongs to. - id: :class:`int` - The category channel ID. - position: :class:`int` - The position in the category list. This is a number that starts at 0. e.g. the - top category is position 0. - nsfw: :class:`bool` - If the channel is marked as "not safe for work". - - .. note:: - - To check if the channel or the guild of that channel are marked as NSFW, consider :meth:`is_nsfw` instead. - """ - - __slots__ = ('name', 'id', 'guild', 'nsfw', '_state', 'position', '_overwrites', 'category_id') - - def __init__(self, *, state: ConnectionState, guild: Guild, data: CategoryChannelPayload): - self._state: ConnectionState = state - self.id: int = int(data['id']) - self._update(guild, data) - - def __repr__(self) -> str: - return f'' - - def _update(self, guild: Guild, data: CategoryChannelPayload) -> None: - self.guild: Guild = guild - self.name: str = data['name'] - self.category_id: Optional[int] = utils._get_as_snowflake(data, 'parent_id') - self.nsfw: bool = data.get('nsfw', False) - self.position: int = data['position'] - self._fill_overwrites(data) - - @property - def _sorting_bucket(self) -> int: - return ChannelType.category.value - - @property - def _scheduled_event_entity_type(self) -> Optional[EntityType]: - return None - - @property - def type(self) -> Literal[ChannelType.category]: - """:class:`ChannelType`: The channel's Discord type.""" - return ChannelType.category - - def is_nsfw(self) -> bool: - """:class:`bool`: Checks if the category is NSFW.""" - return self.nsfw - - @utils.copy_doc(discord.abc.GuildChannel.clone) - async def clone(self, *, name: Optional[str] = None, reason: Optional[str] = None) -> CategoryChannel: - return await self._clone_impl({'nsfw': self.nsfw}, name=name, reason=reason) - - @overload - async def edit(self) -> None: - ... - - @overload - async def edit(self, *, position: int, reason: Optional[str] = ...) -> None: - ... - - @overload - async def edit( - self, - *, - name: str = ..., - position: int = ..., - nsfw: bool = ..., - overwrites: Mapping[OverwriteKeyT, PermissionOverwrite] = ..., - reason: Optional[str] = ..., - ) -> CategoryChannel: - ... 
- - async def edit(self, *, reason: Optional[str] = None, **options: Any) -> Optional[CategoryChannel]: - """|coro| - - Edits the channel. - - You must have :attr:`~Permissions.manage_channels` to do this. - - .. versionchanged:: 1.3 - The ``overwrites`` keyword-only parameter was added. - - .. versionchanged:: 2.0 - Edits are no longer in-place, the newly edited channel is returned instead. - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` or - :exc:`ValueError` instead of ``InvalidArgument``. - - Parameters - ---------- - name: :class:`str` - The new category's name. - position: :class:`int` - The new category's position. - nsfw: :class:`bool` - To mark the category as NSFW or not. - reason: Optional[:class:`str`] - The reason for editing this category. Shows up on the audit log. - overwrites: :class:`Mapping` - A :class:`Mapping` of target (either a role or a member) to - :class:`PermissionOverwrite` to apply to the channel. - - Raises - ------ - ValueError - If position is less than 0 or greater than the number of categories. - TypeError - The overwrite information is not in proper form. - Forbidden - You do not have permissions to edit the category. - HTTPException - Editing the category failed. - - Returns - -------- - Optional[:class:`.CategoryChannel`] - The newly edited category channel. If the edit was only positional - then ``None`` is returned instead. - """ - - payload = await self._edit(options, reason=reason) - if payload is not None: - # the payload will always be the proper channel payload - return self.__class__(state=self._state, guild=self.guild, data=payload) # type: ignore - - @utils.copy_doc(discord.abc.GuildChannel.move) - async def move(self, **kwargs: Any) -> None: - kwargs.pop('category', None) - await super().move(**kwargs) - - @property - def channels(self) -> List[GuildChannelType]: - """List[:class:`abc.GuildChannel`]: Returns the channels that are under this category. - - These are sorted by the official Discord UI, which places voice channels below the text channels. - """ - - def comparator(channel): - return (not isinstance(channel, TextChannel), channel.position) - - ret = [c for c in self.guild.channels if c.category_id == self.id] - ret.sort(key=comparator) - return ret - - @property - def text_channels(self) -> List[TextChannel]: - """List[:class:`TextChannel`]: Returns the text channels that are under this category.""" - ret = [c for c in self.guild.channels if c.category_id == self.id and isinstance(c, TextChannel)] - ret.sort(key=lambda c: (c.position, c.id)) - return ret - - @property - def voice_channels(self) -> List[VoiceChannel]: - """List[:class:`VoiceChannel`]: Returns the voice channels that are under this category.""" - ret = [c for c in self.guild.channels if c.category_id == self.id and isinstance(c, VoiceChannel)] - ret.sort(key=lambda c: (c.position, c.id)) - return ret - - @property - def stage_channels(self) -> List[StageChannel]: - """List[:class:`StageChannel`]: Returns the stage channels that are under this category. - - .. versionadded:: 1.7 - """ - ret = [c for c in self.guild.channels if c.category_id == self.id and isinstance(c, StageChannel)] - ret.sort(key=lambda c: (c.position, c.id)) - return ret - - async def create_text_channel(self, name: str, **options: Any) -> TextChannel: - """|coro| - - A shortcut method to :meth:`Guild.create_text_channel` to create a :class:`TextChannel` in the category. - - Returns - ------- - :class:`TextChannel` - The channel that was just created. 
- """ - return await self.guild.create_text_channel(name, category=self, **options) - - async def create_voice_channel(self, name: str, **options: Any) -> VoiceChannel: - """|coro| - - A shortcut method to :meth:`Guild.create_voice_channel` to create a :class:`VoiceChannel` in the category. - - Returns - ------- - :class:`VoiceChannel` - The channel that was just created. - """ - return await self.guild.create_voice_channel(name, category=self, **options) - - async def create_stage_channel(self, name: str, **options: Any) -> StageChannel: - """|coro| - - A shortcut method to :meth:`Guild.create_stage_channel` to create a :class:`StageChannel` in the category. - - .. versionadded:: 1.7 - - Returns - ------- - :class:`StageChannel` - The channel that was just created. - """ - return await self.guild.create_stage_channel(name, category=self, **options) - - async def create_forum(self, name: str, **options: Any) -> ForumChannel: - """|coro| - - A shortcut method to :meth:`Guild.create_forum` to create a :class:`ForumChannel` in the category. - - .. versionadded:: 2.0 - - Returns - -------- - :class:`ForumChannel` - The channel that was just created. - """ - return await self.guild.create_forum(name, category=self, **options) - - -class ForumTag(Hashable): - """Represents a forum tag that can be applied to a thread within a :class:`ForumChannel`. - - .. versionadded:: 2.1 - - .. container:: operations - - .. describe:: x == y - - Checks if two forum tags are equal. - - .. describe:: x != y - - Checks if two forum tags are not equal. - - .. describe:: hash(x) - - Returns the forum tag's hash. - - .. describe:: str(x) - - Returns the forum tag's name. - - - Attributes - ----------- - id: :class:`int` - The ID of the tag. If this was manually created then the ID will be ``0``. - name: :class:`str` - The name of the tag. Can only be up to 20 characters. - moderated: :class:`bool` - Whether this tag can only be added or removed by a moderator with - the :attr:`~Permissions.manage_threads` permission. - emoji: Optional[:class:`PartialEmoji`] - The emoji that is used to represent this tag. - Note that if the emoji is a custom emoji, it will *not* have name information. 
- """ - - __slots__ = ('name', 'id', 'moderated', 'emoji') - - def __init__(self, *, name: str, emoji: Optional[EmojiInputType] = None, moderated: bool = False) -> None: - self.name: str = name - self.id: int = 0 - self.moderated: bool = moderated - self.emoji: Optional[PartialEmoji] = None - if isinstance(emoji, _EmojiTag): - self.emoji = emoji._to_partial() - elif isinstance(emoji, str): - self.emoji = PartialEmoji.from_str(emoji) - elif emoji is not None: - raise TypeError(f'emoji must be a Emoji, PartialEmoji, str or None not {emoji.__class__.__name__}') - - @classmethod - def from_data(cls, *, state: ConnectionState, data: ForumTagPayload) -> Self: - self = cls.__new__(cls) - self.name = data['name'] - self.id = int(data['id']) - self.moderated = data.get('moderated', False) - - emoji_name = data['emoji_name'] or '' - emoji_id = utils._get_as_snowflake(data, 'emoji_id') or None # Coerce 0 -> None - if not emoji_name and not emoji_id: - self.emoji = None - else: - self.emoji = PartialEmoji.with_state(state=state, name=emoji_name, id=emoji_id) - return self - - def to_dict(self) -> Dict[str, Any]: - payload: Dict[str, Any] = { - 'name': self.name, - 'moderated': self.moderated, - } - if self.emoji is not None: - payload.update(self.emoji._to_forum_tag_payload()) - else: - payload.update(emoji_id=None, emoji_name=None) - - if self.id: - payload['id'] = self.id - - return payload - - def __repr__(self) -> str: - return f'' - - def __str__(self) -> str: - return self.name - - -class ForumChannel(discord.abc.GuildChannel, Hashable): - """Represents a Discord guild forum channel. - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two forums are equal. - - .. describe:: x != y - - Checks if two forums are not equal. - - .. describe:: hash(x) - - Returns the forum's hash. - - .. describe:: str(x) - - Returns the forum's name. - - Attributes - ----------- - name: :class:`str` - The forum name. - guild: :class:`Guild` - The guild the forum belongs to. - id: :class:`int` - The forum ID. - category_id: Optional[:class:`int`] - The category channel ID this forum belongs to, if applicable. - topic: Optional[:class:`str`] - The forum's topic. ``None`` if it doesn't exist. Called "Guidelines" in the UI. - Can be up to 4096 characters long. - position: :class:`int` - The position in the channel list. This is a number that starts at 0. e.g. the - top channel is position 0. - last_message_id: Optional[:class:`int`] - The last thread ID that was created on this forum. This technically also - coincides with the message ID that started the thread that was created. - It may *not* point to an existing or valid thread or message. - slowmode_delay: :class:`int` - The number of seconds a member must wait between creating threads - in this forum. A value of ``0`` denotes that it is disabled. - Bots and users with :attr:`~Permissions.manage_channels` or - :attr:`~Permissions.manage_messages` bypass slowmode. - nsfw: :class:`bool` - If the forum is marked as "not safe for work" or "age restricted". - default_auto_archive_duration: :class:`int` - The default auto archive duration in minutes for threads created in this forum. - default_thread_slowmode_delay: :class:`int` - The default slowmode delay in seconds for threads created in this forum. - - .. versionadded:: 2.1 - default_reaction_emoji: Optional[:class:`PartialEmoji`] - The default reaction emoji for threads created in this forum to show in the - add reaction button. - - .. 
versionadded:: 2.1 - default_layout: :class:`ForumLayoutType` - The default layout for posts in this forum channel. - Defaults to :attr:`ForumLayoutType.not_set`. - - .. versionadded:: 2.2 - default_sort_order: Optional[:class:`ForumOrderType`] - The default sort order for posts in this forum channel. - - .. versionadded:: 2.3 - """ - - __slots__ = ( - 'name', - 'id', - 'guild', - 'topic', - '_state', - '_flags', - 'nsfw', - 'category_id', - 'position', - 'slowmode_delay', - '_overwrites', - 'last_message_id', - 'default_auto_archive_duration', - 'default_thread_slowmode_delay', - 'default_reaction_emoji', - 'default_layout', - 'default_sort_order', - '_available_tags', - '_flags', - ) - - def __init__(self, *, state: ConnectionState, guild: Guild, data: ForumChannelPayload): - self._state: ConnectionState = state - self.id: int = int(data['id']) - self._update(guild, data) - - def __repr__(self) -> str: - attrs = [ - ('id', self.id), - ('name', self.name), - ('position', self.position), - ('nsfw', self.nsfw), - ('category_id', self.category_id), - ] - joined = ' '.join('%s=%r' % t for t in attrs) - return f'<{self.__class__.__name__} {joined}>' - - def _update(self, guild: Guild, data: ForumChannelPayload) -> None: - self.guild: Guild = guild - self.name: str = data['name'] - self.category_id: Optional[int] = utils._get_as_snowflake(data, 'parent_id') - self.topic: Optional[str] = data.get('topic') - self.position: int = data['position'] - self.nsfw: bool = data.get('nsfw', False) - self.slowmode_delay: int = data.get('rate_limit_per_user', 0) - self.default_auto_archive_duration: ThreadArchiveDuration = data.get('default_auto_archive_duration', 1440) - self.last_message_id: Optional[int] = utils._get_as_snowflake(data, 'last_message_id') - # This takes advantage of the fact that dicts are ordered since Python 3.7 - tags = [ForumTag.from_data(state=self._state, data=tag) for tag in data.get('available_tags', [])] - self.default_thread_slowmode_delay: int = data.get('default_thread_rate_limit_per_user', 0) - self.default_layout: ForumLayoutType = try_enum(ForumLayoutType, data.get('default_forum_layout', 0)) - self._available_tags: Dict[int, ForumTag] = {tag.id: tag for tag in tags} - - self.default_reaction_emoji: Optional[PartialEmoji] = None - default_reaction_emoji = data.get('default_reaction_emoji') - if default_reaction_emoji: - self.default_reaction_emoji = PartialEmoji.with_state( - state=self._state, - id=utils._get_as_snowflake(default_reaction_emoji, 'emoji_id') or None, # Coerce 0 -> None - name=default_reaction_emoji.get('emoji_name') or '', - ) - - self.default_sort_order: Optional[ForumOrderType] = None - default_sort_order = data.get('default_sort_order') - if default_sort_order is not None: - self.default_sort_order = try_enum(ForumOrderType, default_sort_order) - - self._flags: int = data.get('flags', 0) - self._fill_overwrites(data) - - @property - def type(self) -> Literal[ChannelType.forum]: - """:class:`ChannelType`: The channel's Discord type.""" - return ChannelType.forum - - @property - def _sorting_bucket(self) -> int: - return ChannelType.text.value - - @property - def _scheduled_event_entity_type(self) -> Optional[EntityType]: - return None - - @utils.copy_doc(discord.abc.GuildChannel.permissions_for) - def permissions_for(self, obj: Union[Member, Role], /) -> Permissions: - base = super().permissions_for(obj) - self._apply_implicit_permissions(base) - - # text channels do not have voice related permissions - denied = Permissions.voice() - base.value &= 
~denied.value - return base - - def get_thread(self, thread_id: int, /) -> Optional[Thread]: - """Returns a thread with the given ID. - - .. note:: - - This does not always retrieve archived threads, as they are not retained in the internal - cache. Use :func:`Guild.fetch_channel` instead. - - .. versionadded:: 2.2 - - Parameters - ----------- - thread_id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[:class:`Thread`] - The returned thread or ``None`` if not found. - """ - thread = self.guild.get_thread(thread_id) - if thread is not None and thread.parent_id == self.id: - return thread - return None - - @property - def threads(self) -> List[Thread]: - """List[:class:`Thread`]: Returns all the threads that you can see.""" - return [thread for thread in self.guild._threads.values() if thread.parent_id == self.id] - - @property - def flags(self) -> ChannelFlags: - """:class:`ChannelFlags`: The flags associated with this thread. - - .. versionadded:: 2.1 - """ - return ChannelFlags._from_value(self._flags) - - @property - def available_tags(self) -> Sequence[ForumTag]: - """Sequence[:class:`ForumTag`]: Returns all the available tags for this forum. - - .. versionadded:: 2.1 - """ - return utils.SequenceProxy(self._available_tags.values()) - - def get_tag(self, tag_id: int, /) -> Optional[ForumTag]: - """Returns the tag with the given ID. - - .. versionadded:: 2.1 - - Parameters - ---------- - tag_id: :class:`int` - The ID to search for. - - Returns - ------- - Optional[:class:`ForumTag`] - The tag with the given ID, or ``None`` if not found. - """ - return self._available_tags.get(tag_id) - - def is_nsfw(self) -> bool: - """:class:`bool`: Checks if the forum is NSFW.""" - return self.nsfw - - @utils.copy_doc(discord.abc.GuildChannel.clone) - async def clone(self, *, name: Optional[str] = None, reason: Optional[str] = None) -> ForumChannel: - return await self._clone_impl( - {'topic': self.topic, 'nsfw': self.nsfw, 'rate_limit_per_user': self.slowmode_delay}, name=name, reason=reason - ) - - @overload - async def edit(self) -> None: - ... - - @overload - async def edit(self, *, position: int, reason: Optional[str] = ...) -> None: - ... - - @overload - async def edit( - self, - *, - reason: Optional[str] = ..., - name: str = ..., - topic: str = ..., - position: int = ..., - nsfw: bool = ..., - sync_permissions: bool = ..., - category: Optional[CategoryChannel] = ..., - slowmode_delay: int = ..., - default_auto_archive_duration: ThreadArchiveDuration = ..., - type: ChannelType = ..., - overwrites: Mapping[OverwriteKeyT, PermissionOverwrite] = ..., - available_tags: Sequence[ForumTag] = ..., - default_thread_slowmode_delay: int = ..., - default_reaction_emoji: Optional[EmojiInputType] = ..., - default_layout: ForumLayoutType = ..., - default_sort_order: ForumOrderType = ..., - require_tag: bool = ..., - ) -> ForumChannel: - ... - - async def edit(self, *, reason: Optional[str] = None, **options: Any) -> Optional[ForumChannel]: - """|coro| - - Edits the forum. - - You must have :attr:`~Permissions.manage_channels` to do this. - - Parameters - ---------- - name: :class:`str` - The new forum name. - topic: :class:`str` - The new forum's topic. - position: :class:`int` - The new forum's position. - nsfw: :class:`bool` - To mark the forum as NSFW or not. - sync_permissions: :class:`bool` - Whether to sync permissions with the forum's new or pre-existing - category. Defaults to ``False``. - category: Optional[:class:`CategoryChannel`] - The new category for this forum. 
Can be ``None`` to remove the - category. - slowmode_delay: :class:`int` - Specifies the slowmode rate limit for user in this forum, in seconds. - A value of ``0`` disables slowmode. The maximum value possible is ``21600``. - type: :class:`ChannelType` - Change the type of this text forum. Currently, only conversion between - :attr:`ChannelType.text` and :attr:`ChannelType.news` is supported. This - is only available to guilds that contain ``NEWS`` in :attr:`Guild.features`. - reason: Optional[:class:`str`] - The reason for editing this forum. Shows up on the audit log. - overwrites: :class:`Mapping` - A :class:`Mapping` of target (either a role or a member) to - :class:`PermissionOverwrite` to apply to the forum. - default_auto_archive_duration: :class:`int` - The new default auto archive duration in minutes for threads created in this channel. - Must be one of ``60``, ``1440``, ``4320``, or ``10080``. - available_tags: Sequence[:class:`ForumTag`] - The new available tags for this forum. - - .. versionadded:: 2.1 - default_thread_slowmode_delay: :class:`int` - The new default slowmode delay for threads in this channel. - - .. versionadded:: 2.1 - default_reaction_emoji: Optional[Union[:class:`Emoji`, :class:`PartialEmoji`, :class:`str`]] - The new default reaction emoji for threads in this channel. - - .. versionadded:: 2.1 - default_layout: :class:`ForumLayoutType` - The new default layout for posts in this forum. - - .. versionadded:: 2.2 - default_sort_order: Optional[:class:`ForumOrderType`] - The new default sort order for posts in this forum. - - .. versionadded:: 2.3 - require_tag: :class:`bool` - Whether to require a tag for threads in this channel or not. - - .. versionadded:: 2.1 - - Raises - ------ - ValueError - The new ``position`` is less than 0 or greater than the number of channels. - TypeError - The permission overwrite information is not in proper form or a type - is not the expected type. - Forbidden - You do not have permissions to edit the forum. - HTTPException - Editing the forum failed. - - Returns - -------- - Optional[:class:`.ForumChannel`] - The newly edited forum channel. If the edit was only positional - then ``None`` is returned instead. 
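A minimal sketch of an edit call using the parameters above; ``forum`` is assumed to be a ForumChannel already obtained from the bot's cache, and the values are placeholders rather than recommendations:

    import discord

    async def tidy_forum(forum: discord.ForumChannel) -> None:
        await forum.edit(
            topic='Read the pinned post before opening a thread.',
            require_tag=True,  # new posts must carry at least one tag
            default_layout=discord.ForumLayoutType.gallery_view,
            default_sort_order=discord.ForumOrderType.latest_activity,
            slowmode_delay=30,
            reason='Forum housekeeping',
        )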
- """ - - try: - tags: Sequence[ForumTag] = options.pop('available_tags') - except KeyError: - pass - else: - options['available_tags'] = [tag.to_dict() for tag in tags] - - try: - default_reaction_emoji: Optional[EmojiInputType] = options.pop('default_reaction_emoji') - except KeyError: - pass - else: - if default_reaction_emoji is None: - options['default_reaction_emoji'] = None - elif isinstance(default_reaction_emoji, _EmojiTag): - options['default_reaction_emoji'] = default_reaction_emoji._to_partial()._to_forum_tag_payload() - elif isinstance(default_reaction_emoji, str): - options['default_reaction_emoji'] = PartialEmoji.from_str(default_reaction_emoji)._to_forum_tag_payload() - - try: - require_tag = options.pop('require_tag') - except KeyError: - pass - else: - flags = self.flags - flags.require_tag = require_tag - options['flags'] = flags.value - - try: - layout = options.pop('default_layout') - except KeyError: - pass - else: - if not isinstance(layout, ForumLayoutType): - raise TypeError(f'default_layout parameter must be a ForumLayoutType not {layout.__class__.__name__}') - - options['default_forum_layout'] = layout.value - - try: - sort_order = options.pop('default_sort_order') - except KeyError: - pass - else: - if sort_order is None: - options['default_sort_order'] = None - else: - if not isinstance(sort_order, ForumOrderType): - raise TypeError( - f'default_sort_order parameter must be a ForumOrderType not {sort_order.__class__.__name__}' - ) - - options['default_sort_order'] = sort_order.value - - payload = await self._edit(options, reason=reason) - if payload is not None: - # the payload will always be the proper channel payload - return self.__class__(state=self._state, guild=self.guild, data=payload) # type: ignore - - async def create_tag( - self, - *, - name: str, - emoji: Optional[PartialEmoji] = None, - moderated: bool = False, - reason: Optional[str] = None, - ) -> ForumTag: - """|coro| - - Creates a new tag in this forum. - - You must have :attr:`~Permissions.manage_channels` to do this. - - Parameters - ---------- - name: :class:`str` - The name of the tag. Can only be up to 20 characters. - emoji: Optional[Union[:class:`str`, :class:`PartialEmoji`]] - The emoji to use for the tag. - moderated: :class:`bool` - Whether the tag can only be applied by moderators. - reason: Optional[:class:`str`] - The reason for creating this tag. Shows up on the audit log. - - Raises - ------ - Forbidden - You do not have permissions to create a tag in this forum. - HTTPException - Creating the tag failed. - - Returns - ------- - :class:`ForumTag` - The newly created tag. 
- """ - - prior = list(self._available_tags.values()) - result = ForumTag(name=name, emoji=emoji, moderated=moderated) - prior.append(result) - payload = await self._state.http.edit_channel( - self.id, reason=reason, available_tags=[tag.to_dict() for tag in prior] - ) - try: - result.id = int(payload['available_tags'][-1]['id']) # type: ignore - except (KeyError, IndexError, ValueError): - pass - - return result - - async def create_thread( - self, - *, - name: str, - auto_archive_duration: ThreadArchiveDuration = MISSING, - slowmode_delay: Optional[int] = None, - content: Optional[str] = None, - tts: bool = False, - embed: Embed = MISSING, - embeds: Sequence[Embed] = MISSING, - file: File = MISSING, - files: Sequence[File] = MISSING, - stickers: Sequence[Union[GuildSticker, StickerItem]] = MISSING, - allowed_mentions: AllowedMentions = MISSING, - mention_author: bool = MISSING, - applied_tags: Sequence[ForumTag] = MISSING, - view: View = MISSING, - suppress_embeds: bool = False, - reason: Optional[str] = None, - ) -> ThreadWithMessage: - """|coro| - - Creates a thread in this forum. - - This thread is a public thread with the initial message given. Currently in order - to start a thread in this forum, the user needs :attr:`~discord.Permissions.send_messages`. - - You must send at least one of ``content``, ``embed``, ``embeds``, ``file``, ``files``, - or ``view`` to create a thread in a forum, since forum channels must have a starter message. - - Parameters - ----------- - name: :class:`str` - The name of the thread. - auto_archive_duration: :class:`int` - The duration in minutes before a thread is automatically hidden from the channel list. - If not provided, the channel's default auto archive duration is used. - - Must be one of ``60``, ``1440``, ``4320``, or ``10080``, if provided. - slowmode_delay: Optional[:class:`int`] - Specifies the slowmode rate limit for user in this channel, in seconds. - The maximum value possible is ``21600``. By default no slowmode rate limit - if this is ``None``. - content: Optional[:class:`str`] - The content of the message to send with the thread. - tts: :class:`bool` - Indicates if the message should be sent using text-to-speech. - embed: :class:`~discord.Embed` - The rich embed for the content. - embeds: List[:class:`~discord.Embed`] - A list of embeds to upload. Must be a maximum of 10. - file: :class:`~discord.File` - The file to upload. - files: List[:class:`~discord.File`] - A list of files to upload. Must be a maximum of 10. - allowed_mentions: :class:`~discord.AllowedMentions` - Controls the mentions being processed in this message. If this is - passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`. - The merging behaviour only overrides attributes that have been explicitly passed - to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`. - If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions` - are used instead. - mention_author: :class:`bool` - If set, overrides the :attr:`~discord.AllowedMentions.replied_user` attribute of ``allowed_mentions``. - applied_tags: List[:class:`discord.ForumTag`] - A list of tags to apply to the thread. - view: :class:`discord.ui.View` - A Discord UI View to add to the message. - stickers: Sequence[Union[:class:`~discord.GuildSticker`, :class:`~discord.StickerItem`]] - A list of stickers to upload. Must be a maximum of 3. - suppress_embeds: :class:`bool` - Whether to suppress embeds for the message. 
This sends the message without any embeds if set to ``True``. - reason: :class:`str` - The reason for creating a new thread. Shows up on the audit log. - - Raises - ------- - Forbidden - You do not have permissions to create a thread. - HTTPException - Starting the thread failed. - ValueError - The ``files`` or ``embeds`` list is not of the appropriate size. - TypeError - You specified both ``file`` and ``files``, - or you specified both ``embed`` and ``embeds``. - - Returns - -------- - Tuple[:class:`Thread`, :class:`Message`] - The created thread with the created message. - This is also accessible as a namedtuple with ``thread`` and ``message`` fields. - """ - - state = self._state - previous_allowed_mention = state.allowed_mentions - if stickers is MISSING: - sticker_ids = MISSING - else: - sticker_ids: SnowflakeList = [s.id for s in stickers] - - if view and not hasattr(view, '__discord_ui_view__'): - raise TypeError(f'view parameter must be View not {view.__class__.__name__}') - - if suppress_embeds: - from .message import MessageFlags # circular import - - flags = MessageFlags._from_value(4) - else: - flags = MISSING - - content = str(content) if content else MISSING - - channel_payload = { - 'name': name, - 'auto_archive_duration': auto_archive_duration or self.default_auto_archive_duration, - 'rate_limit_per_user': slowmode_delay, - 'type': 11, # Private threads don't seem to be allowed - } - - if applied_tags is not MISSING: - channel_payload['applied_tags'] = [str(tag.id) for tag in applied_tags] - - with handle_message_parameters( - content=content, - tts=tts, - file=file, - files=files, - embed=embed, - embeds=embeds, - allowed_mentions=allowed_mentions, - previous_allowed_mentions=previous_allowed_mention, - mention_author=None if mention_author is MISSING else mention_author, - stickers=sticker_ids, - view=view, - flags=flags, - channel_payload=channel_payload, - ) as params: - # Circular import - from .message import Message - - data = await state.http.start_thread_in_forum(self.id, params=params, reason=reason) - thread = Thread(guild=self.guild, state=self._state, data=data) - message = Message(state=self._state, channel=thread, data=data['message']) - if view and not view.is_finished(): - self._state.store_view(view, message.id) - - return ThreadWithMessage(thread=thread, message=message) - - async def webhooks(self) -> List[Webhook]: - """|coro| - - Gets the list of webhooks from this channel. - - You must have :attr:`~.Permissions.manage_webhooks` to do this. - - Raises - ------- - Forbidden - You don't have permissions to get the webhooks. - - Returns - -------- - List[:class:`Webhook`] - The webhooks for this channel. - """ - - from .webhook import Webhook - - data = await self._state.http.channel_webhooks(self.id) - return [Webhook.from_state(d, state=self._state) for d in data] - - async def create_webhook(self, *, name: str, avatar: Optional[bytes] = None, reason: Optional[str] = None) -> Webhook: - """|coro| - - Creates a webhook for this channel. - - You must have :attr:`~.Permissions.manage_webhooks` to do this. - - Parameters - ------------- - name: :class:`str` - The webhook's name. - avatar: Optional[:class:`bytes`] - A :term:`py:bytes-like object` representing the webhook's default avatar. - This operates similarly to :meth:`~ClientUser.edit`. - reason: Optional[:class:`str`] - The reason for creating this webhook. Shows up in the audit logs. - - Raises - ------- - HTTPException - Creating the webhook failed. 
- Forbidden - You do not have permissions to create a webhook. - - Returns - -------- - :class:`Webhook` - The created webhook. - """ - - from .webhook import Webhook - - if avatar is not None: - avatar = utils._bytes_to_base64_data(avatar) # type: ignore # Silence reassignment error - - data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar, reason=reason) - return Webhook.from_state(data, state=self._state) - - async def archived_threads( - self, - *, - limit: Optional[int] = 100, - before: Optional[Union[Snowflake, datetime.datetime]] = None, - ) -> AsyncIterator[Thread]: - """Returns an :term:`asynchronous iterator` that iterates over all archived threads in this forum - in order of decreasing :attr:`Thread.archive_timestamp`. - - You must have :attr:`~Permissions.read_message_history` to do this. - - .. versionadded:: 2.0 - - Parameters - ----------- - limit: Optional[:class:`bool`] - The number of threads to retrieve. - If ``None``, retrieves every archived thread in the channel. Note, however, - that this would make it a slow operation. - before: Optional[Union[:class:`abc.Snowflake`, :class:`datetime.datetime`]] - Retrieve archived channels before the given date or ID. - - Raises - ------ - Forbidden - You do not have permissions to get archived threads. - HTTPException - The request to get the archived threads failed. - - Yields - ------- - :class:`Thread` - The archived threads. - """ - before_timestamp = None - - if isinstance(before, datetime.datetime): - before_timestamp = before.isoformat() - elif before is not None: - before_timestamp = utils.snowflake_time(before.id).isoformat() - - update_before = lambda data: data['thread_metadata']['archive_timestamp'] - - while True: - retrieve = 100 - if limit is not None: - if limit <= 0: - return - retrieve = max(2, min(retrieve, limit)) - - data = await self.guild._state.http.get_public_archived_threads(self.id, before=before_timestamp, limit=retrieve) - - threads = data.get('threads', []) - for raw_thread in threads: - yield Thread(guild=self.guild, state=self.guild._state, data=raw_thread) - # Currently the API doesn't let you request less than 2 threads. - # Bail out early if we had to retrieve more than what the limit was. - if limit is not None: - limit -= 1 - if limit <= 0: - return - - if not data.get('has_more', False): - return - - before_timestamp = update_before(threads[-1]) - - -class DMChannel(discord.abc.Messageable, discord.abc.PrivateChannel, Hashable): - """Represents a Discord direct message channel. - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the channel's hash. - - .. describe:: str(x) - - Returns a string representation of the channel - - Attributes - ---------- - recipient: Optional[:class:`User`] - The user you are participating with in the direct message channel. - If this channel is received through the gateway, the recipient information - may not be always available. - me: :class:`ClientUser` - The user presenting yourself. - id: :class:`int` - The direct message channel ID. 
- """ - - __slots__ = ('id', 'recipient', 'me', '_state') - - def __init__(self, *, me: ClientUser, state: ConnectionState, data: DMChannelPayload): - self._state: ConnectionState = state - self.recipient: Optional[User] = None - - recipients = data.get('recipients') - if recipients is not None: - self.recipient = state.store_user(recipients[0]) - - self.me: ClientUser = me - self.id: int = int(data['id']) - - async def _get_channel(self) -> Self: - return self - - def __str__(self) -> str: - if self.recipient: - return f'Direct Message with {self.recipient}' - return 'Direct Message with Unknown User' - - def __repr__(self) -> str: - return f'' - - @classmethod - def _from_message(cls, state: ConnectionState, channel_id: int) -> Self: - self = cls.__new__(cls) - self._state = state - self.id = channel_id - self.recipient = None - # state.user won't be None here - self.me = state.user # type: ignore - return self - - @property - def type(self) -> Literal[ChannelType.private]: - """:class:`ChannelType`: The channel's Discord type.""" - return ChannelType.private - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`Guild`]: The guild this DM channel belongs to. Always ``None``. - - This is mainly provided for compatibility purposes in duck typing. - - .. versionadded:: 2.0 - """ - return None - - @property - def jump_url(self) -> str: - """:class:`str`: Returns a URL that allows the client to jump to the channel. - - .. versionadded:: 2.0 - """ - return f'https://discord.com/channels/@me/{self.id}' - - @property - def created_at(self) -> datetime.datetime: - """:class:`datetime.datetime`: Returns the direct message channel's creation time in UTC.""" - return utils.snowflake_time(self.id) - - def permissions_for(self, obj: Any = None, /) -> Permissions: - """Handles permission resolution for a :class:`User`. - - This function is there for compatibility with other channel types. - - Actual direct messages do not really have the concept of permissions. - - This returns all the Text related permissions set to ``True`` except: - - - :attr:`~Permissions.send_tts_messages`: You cannot send TTS messages in a DM. - - :attr:`~Permissions.manage_messages`: You cannot delete others messages in a DM. - - :attr:`~Permissions.create_private_threads`: There are no threads in a DM. - - :attr:`~Permissions.create_public_threads`: There are no threads in a DM. - - :attr:`~Permissions.manage_threads`: There are no threads in a DM. - - :attr:`~Permissions.send_messages_in_threads`: There are no threads in a DM. - - .. versionchanged:: 2.0 - - ``obj`` parameter is now positional-only. - - .. versionchanged:: 2.1 - - Thread related permissions are now set to ``False``. - - Parameters - ----------- - obj: :class:`User` - The user to check permissions for. This parameter is ignored - but kept for compatibility with other ``permissions_for`` methods. - - Returns - -------- - :class:`Permissions` - The resolved permissions. - """ - return Permissions._dm_permissions() - - def get_partial_message(self, message_id: int, /) -> PartialMessage: - """Creates a :class:`PartialMessage` from the message ID. - - This is useful if you want to work with a message and only have its ID without - doing an unnecessary API call. - - .. versionadded:: 1.6 - - .. versionchanged:: 2.0 - - ``message_id`` parameter is now positional-only. - - Parameters - ------------ - message_id: :class:`int` - The message ID to create a partial message for. - - Returns - --------- - :class:`PartialMessage` - The partial message. 
- """ - - from .message import PartialMessage - - return PartialMessage(channel=self, id=message_id) - - -class GroupChannel(discord.abc.Messageable, discord.abc.PrivateChannel, Hashable): - """Represents a Discord group channel. - - .. container:: operations - - .. describe:: x == y - - Checks if two channels are equal. - - .. describe:: x != y - - Checks if two channels are not equal. - - .. describe:: hash(x) - - Returns the channel's hash. - - .. describe:: str(x) - - Returns a string representation of the channel - - Attributes - ---------- - recipients: List[:class:`User`] - The users you are participating with in the group channel. - me: :class:`ClientUser` - The user presenting yourself. - id: :class:`int` - The group channel ID. - owner: Optional[:class:`User`] - The user that owns the group channel. - owner_id: :class:`int` - The owner ID that owns the group channel. - - .. versionadded:: 2.0 - name: Optional[:class:`str`] - The group channel's name if provided. - """ - - __slots__ = ('id', 'recipients', 'owner_id', 'owner', '_icon', 'name', 'me', '_state') - - def __init__(self, *, me: ClientUser, state: ConnectionState, data: GroupChannelPayload): - self._state: ConnectionState = state - self.id: int = int(data['id']) - self.me: ClientUser = me - self._update_group(data) - - def _update_group(self, data: GroupChannelPayload) -> None: - self.owner_id: Optional[int] = utils._get_as_snowflake(data, 'owner_id') - self._icon: Optional[str] = data.get('icon') - self.name: Optional[str] = data.get('name') - self.recipients: List[User] = [self._state.store_user(u) for u in data.get('recipients', [])] - - self.owner: Optional[BaseUser] - if self.owner_id == self.me.id: - self.owner = self.me - else: - self.owner = utils.find(lambda u: u.id == self.owner_id, self.recipients) - - async def _get_channel(self) -> Self: - return self - - def __str__(self) -> str: - if self.name: - return self.name - - if len(self.recipients) == 0: - return 'Unnamed' - - return ', '.join(map(lambda x: x.name, self.recipients)) - - def __repr__(self) -> str: - return f'' - - @property - def type(self) -> Literal[ChannelType.group]: - """:class:`ChannelType`: The channel's Discord type.""" - return ChannelType.group - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`Guild`]: The guild this group channel belongs to. Always ``None``. - - This is mainly provided for compatibility purposes in duck typing. - - .. versionadded:: 2.0 - """ - return None - - @property - def icon(self) -> Optional[Asset]: - """Optional[:class:`Asset`]: Returns the channel's icon asset if available.""" - if self._icon is None: - return None - return Asset._from_icon(self._state, self.id, self._icon, path='channel') - - @property - def created_at(self) -> datetime.datetime: - """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" - return utils.snowflake_time(self.id) - - @property - def jump_url(self) -> str: - """:class:`str`: Returns a URL that allows the client to jump to the channel. - - .. versionadded:: 2.0 - """ - return f'https://discord.com/channels/@me/{self.id}' - - def permissions_for(self, obj: Snowflake, /) -> Permissions: - """Handles permission resolution for a :class:`User`. - - This function is there for compatibility with other channel types. - - Actual direct messages do not really have the concept of permissions. - - This returns all the Text related permissions set to ``True`` except: - - - :attr:`~Permissions.send_tts_messages`: You cannot send TTS messages in a DM. 
- - :attr:`~Permissions.manage_messages`: You cannot delete others messages in a DM. - - :attr:`~Permissions.create_private_threads`: There are no threads in a DM. - - :attr:`~Permissions.create_public_threads`: There are no threads in a DM. - - :attr:`~Permissions.manage_threads`: There are no threads in a DM. - - :attr:`~Permissions.send_messages_in_threads`: There are no threads in a DM. - - This also checks the kick_members permission if the user is the owner. - - .. versionchanged:: 2.0 - - ``obj`` parameter is now positional-only. - - .. versionchanged:: 2.1 - - Thread related permissions are now set to ``False``. - - Parameters - ----------- - obj: :class:`~discord.abc.Snowflake` - The user to check permissions for. - - Returns - -------- - :class:`Permissions` - The resolved permissions for the user. - """ - - base = Permissions._dm_permissions() - base.mention_everyone = True - - if obj.id == self.owner_id: - base.kick_members = True - - return base - - async def leave(self) -> None: - """|coro| - - Leave the group. - - If you are the only one in the group, this deletes it as well. - - Raises - ------- - HTTPException - Leaving the group failed. - """ - - await self._state.http.leave_group(self.id) - - -class PartialMessageable(discord.abc.Messageable, Hashable): - """Represents a partial messageable to aid with working messageable channels when - only a channel ID is present. - - The only way to construct this class is through :meth:`Client.get_partial_messageable`. - - Note that this class is trimmed down and has no rich attributes. - - .. versionadded:: 2.0 - - .. container:: operations - - .. describe:: x == y - - Checks if two partial messageables are equal. - - .. describe:: x != y - - Checks if two partial messageables are not equal. - - .. describe:: hash(x) - - Returns the partial messageable's hash. - - Attributes - ----------- - id: :class:`int` - The channel ID associated with this partial messageable. - guild_id: Optional[:class:`int`] - The guild ID associated with this partial messageable. - type: Optional[:class:`ChannelType`] - The channel type associated with this partial messageable, if given. - """ - - def __init__(self, state: ConnectionState, id: int, guild_id: Optional[int] = None, type: Optional[ChannelType] = None): - self._state: ConnectionState = state - self.id: int = id - self.guild_id: Optional[int] = guild_id - self.type: Optional[ChannelType] = type - - def __repr__(self) -> str: - return f'<{self.__class__.__name__} id={self.id} type={self.type!r}>' - - async def _get_channel(self) -> PartialMessageable: - return self - - @property - def guild(self) -> Optional[Guild]: - """Optional[:class:`Guild`]: The guild this partial messageable is in.""" - return self._state._get_guild(self.guild_id) - - @property - def jump_url(self) -> str: - """:class:`str`: Returns a URL that allows the client to jump to the channel.""" - if self.guild_id is None: - return f'https://discord.com/channels/@me/{self.id}' - return f'https://discord.com/channels/{self.guild_id}/{self.id}' - - @property - def created_at(self) -> datetime.datetime: - """:class:`datetime.datetime`: Returns the channel's creation time in UTC.""" - return utils.snowflake_time(self.id) - - def permissions_for(self, obj: Any = None, /) -> Permissions: - """Handles permission resolution for a :class:`User`. - - This function is there for compatibility with other channel types. 
- - Since partial messageables cannot reasonably have the concept of - permissions, this will always return :meth:`Permissions.none`. - - Parameters - ----------- - obj: :class:`User` - The user to check permissions for. This parameter is ignored - but kept for compatibility with other ``permissions_for`` methods. - - Returns - -------- - :class:`Permissions` - The resolved permissions. - """ - - return Permissions.none() - - def get_partial_message(self, message_id: int, /) -> PartialMessage: - """Creates a :class:`PartialMessage` from the message ID. - - This is useful if you want to work with a message and only have its ID without - doing an unnecessary API call. - - Parameters - ------------ - message_id: :class:`int` - The message ID to create a partial message for. - - Returns - --------- - :class:`PartialMessage` - The partial message. - """ - - from .message import PartialMessage - - return PartialMessage(channel=self, id=message_id) - - -def _guild_channel_factory(channel_type: int): - value = try_enum(ChannelType, channel_type) - if value is ChannelType.text: - return TextChannel, value - elif value is ChannelType.voice: - return VoiceChannel, value - elif value is ChannelType.category: - return CategoryChannel, value - elif value is ChannelType.news: - return TextChannel, value - elif value is ChannelType.stage_voice: - return StageChannel, value - elif value is ChannelType.forum: - return ForumChannel, value - else: - return None, value - - -def _channel_factory(channel_type: int): - cls, value = _guild_channel_factory(channel_type) - if value is ChannelType.private: - return DMChannel, value - elif value is ChannelType.group: - return GroupChannel, value - else: - return cls, value - - -def _threaded_channel_factory(channel_type: int): - cls, value = _channel_factory(channel_type) - if value in (ChannelType.private_thread, ChannelType.public_thread, ChannelType.news_thread): - return Thread, value - return cls, value - - -def _threaded_guild_channel_factory(channel_type: int): - cls, value = _guild_channel_factory(channel_type) - if value in (ChannelType.private_thread, ChannelType.public_thread, ChannelType.news_thread): - return Thread, value - return cls, value diff --git a/.venv/Lib/site-packages/discord/client.py b/.venv/Lib/site-packages/discord/client.py deleted file mode 100644 index c4c59e7..0000000 --- a/.venv/Lib/site-packages/discord/client.py +++ /dev/null @@ -1,2724 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations - -import asyncio -import datetime -import logging -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Callable, - Coroutine, - Dict, - Generator, - List, - Literal, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -import aiohttp - -from .user import User, ClientUser -from .invite import Invite -from .template import Template -from .widget import Widget -from .guild import Guild -from .emoji import Emoji -from .channel import _threaded_channel_factory, PartialMessageable -from .enums import ChannelType -from .mentions import AllowedMentions -from .errors import * -from .enums import Status -from .flags import ApplicationFlags, Intents -from .gateway import * -from .activity import ActivityTypes, BaseActivity, create_activity -from .voice_client import VoiceClient -from .http import HTTPClient -from .state import ConnectionState -from . import utils -from .utils import MISSING, time_snowflake -from .object import Object -from .backoff import ExponentialBackoff -from .webhook import Webhook -from .appinfo import AppInfo -from .ui.view import View -from .stage_instance import StageInstance -from .threads import Thread -from .sticker import GuildSticker, StandardSticker, StickerPack, _sticker_factory - -if TYPE_CHECKING: - from types import TracebackType - - from typing_extensions import Self - - from .abc import Messageable, PrivateChannel, Snowflake, SnowflakeTime - from .app_commands import Command, ContextMenu - from .automod import AutoModAction, AutoModRule - from .channel import DMChannel, GroupChannel - from .ext.commands import AutoShardedBot, Bot, Context, CommandError - from .guild import GuildChannel - from .integrations import Integration - from .interactions import Interaction - from .member import Member, VoiceState - from .message import Message - from .raw_models import ( - RawAppCommandPermissionsUpdateEvent, - RawBulkMessageDeleteEvent, - RawIntegrationDeleteEvent, - RawMemberRemoveEvent, - RawMessageDeleteEvent, - RawMessageUpdateEvent, - RawReactionActionEvent, - RawReactionClearEmojiEvent, - RawReactionClearEvent, - RawThreadDeleteEvent, - RawThreadMembersUpdate, - RawThreadUpdateEvent, - RawTypingEvent, - ) - from .reaction import Reaction - from .role import Role - from .scheduled_event import ScheduledEvent - from .threads import ThreadMember - from .types.guild import Guild as GuildPayload - from .voice_client import VoiceProtocol - from .audit_logs import AuditLogEntry - - -# fmt: off -__all__ = ( - 'Client', -) -# fmt: on - -T = TypeVar('T') -Coro = Coroutine[Any, Any, T] -CoroT = TypeVar('CoroT', bound=Callable[..., Coro[Any]]) - -_log = logging.getLogger(__name__) - - -class _LoopSentinel: - __slots__ = () - - def __getattr__(self, attr: str) -> None: - msg = ( - 'loop attribute cannot be accessed in non-async contexts. ' - 'Consider using either an asynchronous main function and passing it to asyncio.run or ' - 'using asynchronous initialisation hooks such as Client.setup_hook' - ) - raise AttributeError(msg) - - -_loop: Any = _LoopSentinel() - - -class Client: - r"""Represents a client connection that connects to Discord. - This class is used to interact with the Discord WebSocket and API. - - .. container:: operations - - .. describe:: async with x - - Asynchronously initialises the client and automatically cleans up. - - .. versionadded:: 2.0 - - A number of options can be passed to the :class:`Client`. 
- - Parameters - ----------- - max_messages: Optional[:class:`int`] - The maximum number of messages to store in the internal message cache. - This defaults to ``1000``. Passing in ``None`` disables the message cache. - - .. versionchanged:: 1.3 - Allow disabling the message cache and change the default size to ``1000``. - proxy: Optional[:class:`str`] - Proxy URL. - proxy_auth: Optional[:class:`aiohttp.BasicAuth`] - An object that represents proxy HTTP Basic Authorization. - shard_id: Optional[:class:`int`] - Integer starting at ``0`` and less than :attr:`.shard_count`. - shard_count: Optional[:class:`int`] - The total number of shards. - application_id: :class:`int` - The client's application ID. - intents: :class:`Intents` - The intents that you want to enable for the session. This is a way of - disabling and enabling certain gateway events from triggering and being sent. - - .. versionadded:: 1.5 - - .. versionchanged:: 2.0 - Parameter is now required. - member_cache_flags: :class:`MemberCacheFlags` - Allows for finer control over how the library caches members. - If not given, defaults to cache as much as possible with the - currently selected intents. - - .. versionadded:: 1.5 - chunk_guilds_at_startup: :class:`bool` - Indicates if :func:`.on_ready` should be delayed to chunk all guilds - at start-up if necessary. This operation is incredibly slow for large - amounts of guilds. The default is ``True`` if :attr:`Intents.members` - is ``True``. - - .. versionadded:: 1.5 - status: Optional[:class:`.Status`] - A status to start your presence with upon logging on to Discord. - activity: Optional[:class:`.BaseActivity`] - An activity to start your presence with upon logging on to Discord. - allowed_mentions: Optional[:class:`AllowedMentions`] - Control how the client handles mentions by default on every message sent. - - .. versionadded:: 1.4 - heartbeat_timeout: :class:`float` - The maximum numbers of seconds before timing out and restarting the - WebSocket in the case of not receiving a HEARTBEAT_ACK. Useful if - processing the initial packets take too long to the point of disconnecting - you. The default timeout is 60 seconds. - guild_ready_timeout: :class:`float` - The maximum number of seconds to wait for the GUILD_CREATE stream to end before - preparing the member cache and firing READY. The default timeout is 2 seconds. - - .. versionadded:: 1.4 - assume_unsync_clock: :class:`bool` - Whether to assume the system clock is unsynced. This applies to the ratelimit handling - code. If this is set to ``True``, the default, then the library uses the time to reset - a rate limit bucket given by Discord. If this is ``False`` then your system clock is - used to calculate how long to sleep for. If this is set to ``False`` it is recommended to - sync your system clock to Google's NTP server. - - .. versionadded:: 1.3 - enable_debug_events: :class:`bool` - Whether to enable events that are useful only for debugging gateway related information. - - Right now this involves :func:`on_socket_raw_receive` and :func:`on_socket_raw_send`. If - this is ``False`` then those events will not be dispatched (due to performance considerations). - To enable these events, this must be set to ``True``. Defaults to ``False``. - - .. versionadded:: 2.0 - http_trace: :class:`aiohttp.TraceConfig` - The trace configuration to use for tracking HTTP requests the library does using ``aiohttp``. - This allows you to check requests the library is using. For more information, check the - `aiohttp documentation `_. - - .. 
versionadded:: 2.0 - max_ratelimit_timeout: Optional[:class:`float`] - The maximum number of seconds to wait when a non-global rate limit is encountered. - If a request requires sleeping for more than the seconds passed in, then - :exc:`~discord.RateLimited` will be raised. By default, there is no timeout limit. - In order to prevent misuse and unnecessary bans, the minimum value this can be - set to is ``30.0`` seconds. - - .. versionadded:: 2.0 - - Attributes - ----------- - ws - The websocket gateway the client is currently connected to. Could be ``None``. - """ - - def __init__(self, *, intents: Intents, **options: Any) -> None: - self.loop: asyncio.AbstractEventLoop = _loop - # self.ws is set in the connect method - self.ws: DiscordWebSocket = None # type: ignore - self._listeners: Dict[str, List[Tuple[asyncio.Future, Callable[..., bool]]]] = {} - self.shard_id: Optional[int] = options.get('shard_id') - self.shard_count: Optional[int] = options.get('shard_count') - - proxy: Optional[str] = options.pop('proxy', None) - proxy_auth: Optional[aiohttp.BasicAuth] = options.pop('proxy_auth', None) - unsync_clock: bool = options.pop('assume_unsync_clock', True) - http_trace: Optional[aiohttp.TraceConfig] = options.pop('http_trace', None) - max_ratelimit_timeout: Optional[float] = options.pop('max_ratelimit_timeout', None) - self.http: HTTPClient = HTTPClient( - self.loop, - proxy=proxy, - proxy_auth=proxy_auth, - unsync_clock=unsync_clock, - http_trace=http_trace, - max_ratelimit_timeout=max_ratelimit_timeout, - ) - - self._handlers: Dict[str, Callable[..., None]] = { - 'ready': self._handle_ready, - } - - self._hooks: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = { - 'before_identify': self._call_before_identify_hook, - } - - self._enable_debug_events: bool = options.pop('enable_debug_events', False) - self._connection: ConnectionState[Self] = self._get_state(intents=intents, **options) - self._connection.shard_count = self.shard_count - self._closed: bool = False - self._ready: asyncio.Event = MISSING - self._application: Optional[AppInfo] = None - self._connection._get_websocket = self._get_websocket - self._connection._get_client = lambda: self - - if VoiceClient.warn_nacl: - VoiceClient.warn_nacl = False - _log.warning("PyNaCl is not installed, voice will NOT be supported") - - async def __aenter__(self) -> Self: - await self._async_setup_hook() - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - if not self.is_closed(): - await self.close() - - # internals - - def _get_websocket(self, guild_id: Optional[int] = None, *, shard_id: Optional[int] = None) -> DiscordWebSocket: - return self.ws - - def _get_state(self, **options: Any) -> ConnectionState: - return ConnectionState(dispatch=self.dispatch, handlers=self._handlers, hooks=self._hooks, http=self.http, **options) - - def _handle_ready(self) -> None: - self._ready.set() - - @property - def latency(self) -> float: - """:class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds. - - This could be referred to as the Discord WebSocket protocol latency. - """ - ws = self.ws - return float('nan') if not ws else ws.latency - - def is_ws_ratelimited(self) -> bool: - """:class:`bool`: Whether the websocket is currently rate limited. - - This can be useful to know when deciding whether you should query members - using HTTP or via the gateway. - - .. 
versionadded:: 1.6 - """ - if self.ws: - return self.ws.is_ratelimited() - return False - - @property - def user(self) -> Optional[ClientUser]: - """Optional[:class:`.ClientUser`]: Represents the connected client. ``None`` if not logged in.""" - return self._connection.user - - @property - def guilds(self) -> Sequence[Guild]: - """Sequence[:class:`.Guild`]: The guilds that the connected client is a member of.""" - return self._connection.guilds - - @property - def emojis(self) -> Sequence[Emoji]: - """Sequence[:class:`.Emoji`]: The emojis that the connected client has.""" - return self._connection.emojis - - @property - def stickers(self) -> Sequence[GuildSticker]: - """Sequence[:class:`.GuildSticker`]: The stickers that the connected client has. - - .. versionadded:: 2.0 - """ - return self._connection.stickers - - @property - def cached_messages(self) -> Sequence[Message]: - """Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached. - - .. versionadded:: 1.1 - """ - return utils.SequenceProxy(self._connection._messages or []) - - @property - def private_channels(self) -> Sequence[PrivateChannel]: - """Sequence[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on. - - .. note:: - - This returns only up to 128 most recent private channels due to an internal working - on how Discord deals with private channels. - """ - return self._connection.private_channels - - @property - def voice_clients(self) -> List[VoiceProtocol]: - """List[:class:`.VoiceProtocol`]: Represents a list of voice connections. - - These are usually :class:`.VoiceClient` instances. - """ - return self._connection.voice_clients - - @property - def application_id(self) -> Optional[int]: - """Optional[:class:`int`]: The client's application ID. - - If this is not passed via ``__init__`` then this is retrieved - through the gateway when an event contains the data or after a call - to :meth:`~discord.Client.login`. Usually after :func:`~discord.on_connect` - is called. - - .. versionadded:: 2.0 - """ - return self._connection.application_id - - @property - def application_flags(self) -> ApplicationFlags: - """:class:`~discord.ApplicationFlags`: The client's application flags. - - .. versionadded:: 2.0 - """ - return self._connection.application_flags - - @property - def application(self) -> Optional[AppInfo]: - """Optional[:class:`~discord.AppInfo`]: The client's application info. - - This is retrieved on :meth:`~discord.Client.login` and is not updated - afterwards. This allows populating the application_id without requiring a - gateway connection. - - This is ``None`` if accessed before :meth:`~discord.Client.login` is called. - - .. seealso:: The :meth:`~discord.Client.application_info` API call - - .. 
versionadded:: 2.0 - """ - return self._application - - def is_ready(self) -> bool: - """:class:`bool`: Specifies if the client's internal cache is ready for use.""" - return self._ready is not MISSING and self._ready.is_set() - - async def _run_event( - self, - coro: Callable[..., Coroutine[Any, Any, Any]], - event_name: str, - *args: Any, - **kwargs: Any, - ) -> None: - try: - await coro(*args, **kwargs) - except asyncio.CancelledError: - pass - except Exception: - try: - await self.on_error(event_name, *args, **kwargs) - except asyncio.CancelledError: - pass - - def _schedule_event( - self, - coro: Callable[..., Coroutine[Any, Any, Any]], - event_name: str, - *args: Any, - **kwargs: Any, - ) -> asyncio.Task: - wrapped = self._run_event(coro, event_name, *args, **kwargs) - # Schedules the task - return self.loop.create_task(wrapped, name=f'discord.py: {event_name}') - - def dispatch(self, event: str, /, *args: Any, **kwargs: Any) -> None: - _log.debug('Dispatching event %s', event) - method = 'on_' + event - - listeners = self._listeners.get(event) - if listeners: - removed = [] - for i, (future, condition) in enumerate(listeners): - if future.cancelled(): - removed.append(i) - continue - - try: - result = condition(*args) - except Exception as exc: - future.set_exception(exc) - removed.append(i) - else: - if result: - if len(args) == 0: - future.set_result(None) - elif len(args) == 1: - future.set_result(args[0]) - else: - future.set_result(args) - removed.append(i) - - if len(removed) == len(listeners): - self._listeners.pop(event) - else: - for idx in reversed(removed): - del listeners[idx] - - try: - coro = getattr(self, method) - except AttributeError: - pass - else: - self._schedule_event(coro, method, *args, **kwargs) - - async def on_error(self, event_method: str, /, *args: Any, **kwargs: Any) -> None: - """|coro| - - The default error handler provided by the client. - - By default this logs to the library logger however it could be - overridden to have a different implementation. - Check :func:`~discord.on_error` for more details. - - .. versionchanged:: 2.0 - - ``event_method`` parameter is now positional-only - and instead of writing to ``sys.stderr`` it logs instead. - """ - _log.exception('Ignoring exception in %s', event_method) - - # hooks - - async def _call_before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None: - # This hook is an internal hook that actually calls the public one. - # It allows the library to have its own hook without stepping on the - # toes of those who need to override their own hook. - await self.before_identify_hook(shard_id, initial=initial) - - async def before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None: - """|coro| - - A hook that is called before IDENTIFYing a session. This is useful - if you wish to have more control over the synchronization of multiple - IDENTIFYing clients. - - The default implementation sleeps for 5 seconds. - - .. versionadded:: 1.4 - - Parameters - ------------ - shard_id: :class:`int` - The shard ID that requested being IDENTIFY'd - initial: :class:`bool` - Whether this IDENTIFY is the first initial IDENTIFY. 
- """ - - if not initial: - await asyncio.sleep(5.0) - - async def _async_setup_hook(self) -> None: - # Called whenever the client needs to initialise asyncio objects with a running loop - loop = asyncio.get_running_loop() - self.loop = loop - self.http.loop = loop - self._connection.loop = loop - - self._ready = asyncio.Event() - - async def setup_hook(self) -> None: - """|coro| - - A coroutine to be called to setup the bot, by default this is blank. - - To perform asynchronous setup after the bot is logged in but before - it has connected to the Websocket, overwrite this coroutine. - - This is only called once, in :meth:`login`, and will be called before - any events are dispatched, making it a better solution than doing such - setup in the :func:`~discord.on_ready` event. - - .. warning:: - - Since this is called *before* the websocket connection is made therefore - anything that waits for the websocket will deadlock, this includes things - like :meth:`wait_for` and :meth:`wait_until_ready`. - - .. versionadded:: 2.0 - """ - pass - - # login state management - - async def login(self, token: str) -> None: - """|coro| - - Logs in the client with the specified credentials and - calls the :meth:`setup_hook`. - - - Parameters - ----------- - token: :class:`str` - The authentication token. Do not prefix this token with - anything as the library will do it for you. - - Raises - ------ - LoginFailure - The wrong credentials are passed. - HTTPException - An unknown HTTP related error occurred, - usually when it isn't 200 or the known incorrect credentials - passing status code. - """ - - _log.info('logging in using static token') - - if self.loop is _loop: - await self._async_setup_hook() - - if not isinstance(token, str): - raise TypeError(f'expected token to be a str, received {token.__class__.__name__} instead') - token = token.strip() - - data = await self.http.static_login(token) - self._connection.user = ClientUser(state=self._connection, data=data) - self._application = await self.application_info() - if self._connection.application_id is None: - self._connection.application_id = self._application.id - - if not self._connection.application_flags: - self._connection.application_flags = self._application.flags - - await self.setup_hook() - - async def connect(self, *, reconnect: bool = True) -> None: - """|coro| - - Creates a websocket connection and lets the websocket listen - to messages from Discord. This is a loop that runs the entire - event system and miscellaneous aspects of the library. Control - is not resumed until the WebSocket connection is terminated. - - Parameters - ----------- - reconnect: :class:`bool` - If we should attempt reconnecting, either due to internet - failure or a specific failure on Discord's part. Certain - disconnects that lead to bad state will not be handled (such as - invalid sharding payloads or bad tokens). - - Raises - ------- - GatewayNotFound - If the gateway to connect to Discord is not found. Usually if this - is thrown then there is a Discord API outage. - ConnectionClosed - The websocket connection has been terminated. 
- """ - - backoff = ExponentialBackoff() - ws_params = { - 'initial': True, - 'shard_id': self.shard_id, - } - while not self.is_closed(): - try: - coro = DiscordWebSocket.from_client(self, **ws_params) - self.ws = await asyncio.wait_for(coro, timeout=60.0) - ws_params['initial'] = False - while True: - await self.ws.poll_event() - except ReconnectWebSocket as e: - _log.debug('Got a request to %s the websocket.', e.op) - self.dispatch('disconnect') - ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id) - if e.resume: - ws_params['gateway'] = self.ws.gateway - continue - except ( - OSError, - HTTPException, - GatewayNotFound, - ConnectionClosed, - aiohttp.ClientError, - asyncio.TimeoutError, - ) as exc: - - self.dispatch('disconnect') - if not reconnect: - await self.close() - if isinstance(exc, ConnectionClosed) and exc.code == 1000: - # clean close, don't re-raise this - return - raise - - if self.is_closed(): - return - - # If we get connection reset by peer then try to RESUME - if isinstance(exc, OSError) and exc.errno in (54, 10054): - ws_params.update( - sequence=self.ws.sequence, - gateway=self.ws.gateway, - initial=False, - resume=True, - session=self.ws.session_id, - ) - continue - - # We should only get this when an unhandled close code happens, - # such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc) - # sometimes, discord sends us 1000 for unknown reasons so we should reconnect - # regardless and rely on is_closed instead - if isinstance(exc, ConnectionClosed): - if exc.code == 4014: - raise PrivilegedIntentsRequired(exc.shard_id) from None - if exc.code != 1000: - await self.close() - raise - - retry = backoff.delay() - _log.exception("Attempting a reconnect in %.2fs", retry) - await asyncio.sleep(retry) - # Always try to RESUME the connection - # If the connection is not RESUME-able then the gateway will invalidate the session. - # This is apparently what the official Discord client does. - ws_params.update( - sequence=self.ws.sequence, - gateway=self.ws.gateway, - resume=True, - session=self.ws.session_id, - ) - - async def close(self) -> None: - """|coro| - - Closes the connection to Discord. - """ - if self._closed: - return - - self._closed = True - - await self._connection.close() - - if self.ws is not None and self.ws.open: - await self.ws.close(code=1000) - - await self.http.close() - - if self._ready is not MISSING: - self._ready.clear() - - self.loop = MISSING - - def clear(self) -> None: - """Clears the internal state of the bot. - - After this, the bot can be considered "re-opened", i.e. :meth:`is_closed` - and :meth:`is_ready` both return ``False`` along with the bot's internal - cache cleared. - """ - self._closed = False - self._ready.clear() - self._connection.clear() - self.http.clear() - - async def start(self, token: str, *, reconnect: bool = True) -> None: - """|coro| - - A shorthand coroutine for :meth:`login` + :meth:`connect`. - - Parameters - ----------- - token: :class:`str` - The authentication token. Do not prefix this token with - anything as the library will do it for you. - reconnect: :class:`bool` - If we should attempt reconnecting, either due to internet - failure or a specific failure on Discord's part. Certain - disconnects that lead to bad state will not be handled (such as - invalid sharding payloads or bad tokens). - - Raises - ------- - TypeError - An unexpected keyword argument was received. 
- """ - await self.login(token) - await self.connect(reconnect=reconnect) - - def run( - self, - token: str, - *, - reconnect: bool = True, - log_handler: Optional[logging.Handler] = MISSING, - log_formatter: logging.Formatter = MISSING, - log_level: int = MISSING, - root_logger: bool = False, - ) -> None: - """A blocking call that abstracts away the event loop - initialisation from you. - - If you want more control over the event loop then this - function should not be used. Use :meth:`start` coroutine - or :meth:`connect` + :meth:`login`. - - This function also sets up the logging library to make it easier - for beginners to know what is going on with the library. For more - advanced users, this can be disabled by passing ``None`` to - the ``log_handler`` parameter. - - .. warning:: - - This function must be the last function to call due to the fact that it - is blocking. That means that registration of events or anything being - called after this function call will not execute until it returns. - - Parameters - ----------- - token: :class:`str` - The authentication token. Do not prefix this token with - anything as the library will do it for you. - reconnect: :class:`bool` - If we should attempt reconnecting, either due to internet - failure or a specific failure on Discord's part. Certain - disconnects that lead to bad state will not be handled (such as - invalid sharding payloads or bad tokens). - log_handler: Optional[:class:`logging.Handler`] - The log handler to use for the library's logger. If this is ``None`` - then the library will not set up anything logging related. Logging - will still work if ``None`` is passed, though it is your responsibility - to set it up. - - The default log handler if not provided is :class:`logging.StreamHandler`. - - .. versionadded:: 2.0 - log_formatter: :class:`logging.Formatter` - The formatter to use with the given log handler. If not provided then it - defaults to a colour based logging formatter (if available). - - .. versionadded:: 2.0 - log_level: :class:`int` - The default log level for the library's logger. This is only applied if the - ``log_handler`` parameter is not ``None``. Defaults to ``logging.INFO``. - - .. versionadded:: 2.0 - root_logger: :class:`bool` - Whether to set up the root logger rather than the library logger. - By default, only the library logger (``'discord'``) is set up. If this - is set to ``True`` then the root logger is set up as well. - - Defaults to ``False``. - - .. versionadded:: 2.0 - """ - - async def runner(): - async with self: - await self.start(token, reconnect=reconnect) - - if log_handler is not None: - utils.setup_logging( - handler=log_handler, - formatter=log_formatter, - level=log_level, - root=root_logger, - ) - - try: - asyncio.run(runner()) - except KeyboardInterrupt: - # nothing to do here - # `asyncio.run` handles the loop cleanup - # and `self.start` closes all sockets and the HTTPClient instance. - return - - # properties - - def is_closed(self) -> bool: - """:class:`bool`: Indicates if the websocket connection is closed.""" - return self._closed - - @property - def activity(self) -> Optional[ActivityTypes]: - """Optional[:class:`.BaseActivity`]: The activity being used upon - logging in. 
- """ - return create_activity(self._connection._activity, self._connection) - - @activity.setter - def activity(self, value: Optional[ActivityTypes]) -> None: - if value is None: - self._connection._activity = None - elif isinstance(value, BaseActivity): - # ConnectionState._activity is typehinted as ActivityPayload, we're passing Dict[str, Any] - self._connection._activity = value.to_dict() # type: ignore - else: - raise TypeError('activity must derive from BaseActivity.') - - @property - def status(self) -> Status: - """:class:`.Status`: - The status being used upon logging on to Discord. - - .. versionadded: 2.0 - """ - if self._connection._status in set(state.value for state in Status): - return Status(self._connection._status) - return Status.online - - @status.setter - def status(self, value: Status) -> None: - if value is Status.offline: - self._connection._status = 'invisible' - elif isinstance(value, Status): - self._connection._status = str(value) - else: - raise TypeError('status must derive from Status.') - - @property - def allowed_mentions(self) -> Optional[AllowedMentions]: - """Optional[:class:`~discord.AllowedMentions`]: The allowed mention configuration. - - .. versionadded:: 1.4 - """ - return self._connection.allowed_mentions - - @allowed_mentions.setter - def allowed_mentions(self, value: Optional[AllowedMentions]) -> None: - if value is None or isinstance(value, AllowedMentions): - self._connection.allowed_mentions = value - else: - raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__.__name__}') - - @property - def intents(self) -> Intents: - """:class:`~discord.Intents`: The intents configured for this connection. - - .. versionadded:: 1.5 - """ - return self._connection.intents - - # helpers/getters - - @property - def users(self) -> List[User]: - """List[:class:`~discord.User`]: Returns a list of all the users the bot can see.""" - return list(self._connection._users.values()) - - def get_channel(self, id: int, /) -> Optional[Union[GuildChannel, Thread, PrivateChannel]]: - """Returns a channel or thread with the given ID. - - .. versionchanged:: 2.0 - - ``id`` parameter is now positional-only. - - Parameters - ----------- - id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[Union[:class:`.abc.GuildChannel`, :class:`.Thread`, :class:`.abc.PrivateChannel`]] - The returned channel or ``None`` if not found. - """ - return self._connection.get_channel(id) # type: ignore # The cache contains all channel types - - def get_partial_messageable( - self, id: int, *, guild_id: Optional[int] = None, type: Optional[ChannelType] = None - ) -> PartialMessageable: - """Returns a partial messageable with the given channel ID. - - This is useful if you have a channel_id but don't want to do an API call - to send messages to it. - - .. versionadded:: 2.0 - - Parameters - ----------- - id: :class:`int` - The channel ID to create a partial messageable for. - guild_id: Optional[:class:`int`] - The optional guild ID to create a partial messageable for. - - This is not required to actually send messages, but it does allow the - :meth:`~discord.PartialMessageable.jump_url` and - :attr:`~discord.PartialMessageable.guild` properties to function properly. - type: Optional[:class:`.ChannelType`] - The underlying channel type for the partial messageable. 
- - Returns - -------- - :class:`.PartialMessageable` - The partial messageable - """ - return PartialMessageable(state=self._connection, id=id, guild_id=guild_id, type=type) - - def get_stage_instance(self, id: int, /) -> Optional[StageInstance]: - """Returns a stage instance with the given stage channel ID. - - .. versionadded:: 2.0 - - Parameters - ----------- - id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[:class:`.StageInstance`] - The stage instance or ``None`` if not found. - """ - from .channel import StageChannel - - channel = self._connection.get_channel(id) - - if isinstance(channel, StageChannel): - return channel.instance - - def get_guild(self, id: int, /) -> Optional[Guild]: - """Returns a guild with the given ID. - - .. versionchanged:: 2.0 - - ``id`` parameter is now positional-only. - - Parameters - ----------- - id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[:class:`.Guild`] - The guild or ``None`` if not found. - """ - return self._connection._get_guild(id) - - def get_user(self, id: int, /) -> Optional[User]: - """Returns a user with the given ID. - - .. versionchanged:: 2.0 - - ``id`` parameter is now positional-only. - - Parameters - ----------- - id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[:class:`~discord.User`] - The user or ``None`` if not found. - """ - return self._connection.get_user(id) - - def get_emoji(self, id: int, /) -> Optional[Emoji]: - """Returns an emoji with the given ID. - - .. versionchanged:: 2.0 - - ``id`` parameter is now positional-only. - - Parameters - ----------- - id: :class:`int` - The ID to search for. - - Returns - -------- - Optional[:class:`.Emoji`] - The custom emoji or ``None`` if not found. - """ - return self._connection.get_emoji(id) - - def get_sticker(self, id: int, /) -> Optional[GuildSticker]: - """Returns a guild sticker with the given ID. - - .. versionadded:: 2.0 - - .. note:: - - To retrieve standard stickers, use :meth:`.fetch_sticker`. - or :meth:`.fetch_premium_sticker_packs`. - - Returns - -------- - Optional[:class:`.GuildSticker`] - The sticker or ``None`` if not found. - """ - return self._connection.get_sticker(id) - - def get_all_channels(self) -> Generator[GuildChannel, None, None]: - """A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'. - - This is equivalent to: :: - - for guild in client.guilds: - for channel in guild.channels: - yield channel - - .. note:: - - Just because you receive a :class:`.abc.GuildChannel` does not mean that - you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` should - be used for that. - - Yields - ------ - :class:`.abc.GuildChannel` - A channel the client can 'access'. - """ - - for guild in self.guilds: - yield from guild.channels - - def get_all_members(self) -> Generator[Member, None, None]: - """Returns a generator with every :class:`.Member` the client can see. - - This is equivalent to: :: - - for guild in client.guilds: - for member in guild.members: - yield member - - Yields - ------ - :class:`.Member` - A member the client can see. - """ - for guild in self.guilds: - yield from guild.members - - # listeners/waiters - - async def wait_until_ready(self) -> None: - """|coro| - - Waits until the client's internal cache is all ready. - - .. warning:: - - Calling this inside :meth:`setup_hook` can lead to a deadlock. 
- """ - if self._ready is not MISSING: - await self._ready.wait() - else: - raise RuntimeError( - 'Client has not been properly initialised. ' - 'Please use the login method or asynchronous context manager before calling this method' - ) - - # App Commands - - @overload - async def wait_for( - self, - event: Literal['raw_app_command_permissions_update'], - /, - *, - check: Optional[Callable[[RawAppCommandPermissionsUpdateEvent], bool]], - timeout: Optional[float] = None, - ) -> RawAppCommandPermissionsUpdateEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['app_command_completion'], - /, - *, - check: Optional[Callable[[Interaction[Self], Union[Command[Any, ..., Any], ContextMenu]], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Interaction[Self], Union[Command[Any, ..., Any], ContextMenu]]: - ... - - # AutoMod - - @overload - async def wait_for( - self, - event: Literal['automod_rule_create', 'automod_rule_update', 'automod_rule_delete'], - /, - *, - check: Optional[Callable[[AutoModRule], bool]], - timeout: Optional[float] = None, - ) -> AutoModRule: - ... - - @overload - async def wait_for( - self, - event: Literal['automod_action'], - /, - *, - check: Optional[Callable[[AutoModAction], bool]], - timeout: Optional[float] = None, - ) -> AutoModAction: - ... - - # Channels - - @overload - async def wait_for( - self, - event: Literal['private_channel_update'], - /, - *, - check: Optional[Callable[[GroupChannel, GroupChannel], bool]], - timeout: Optional[float] = None, - ) -> Tuple[GroupChannel, GroupChannel]: - ... - - @overload - async def wait_for( - self, - event: Literal['private_channel_pins_update'], - /, - *, - check: Optional[Callable[[PrivateChannel, datetime.datetime], bool]], - timeout: Optional[float] = None, - ) -> Tuple[PrivateChannel, datetime.datetime]: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_channel_delete', 'guild_channel_create'], - /, - *, - check: Optional[Callable[[GuildChannel], bool]], - timeout: Optional[float] = None, - ) -> GuildChannel: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_channel_update'], - /, - *, - check: Optional[Callable[[GuildChannel, GuildChannel], bool]], - timeout: Optional[float] = None, - ) -> Tuple[GuildChannel, GuildChannel]: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_channel_pins_update'], - /, - *, - check: Optional[ - Callable[ - [Union[GuildChannel, Thread], Optional[datetime.datetime]], - bool, - ] - ], - timeout: Optional[float] = None, - ) -> Tuple[Union[GuildChannel, Thread], Optional[datetime.datetime]]: - ... - - @overload - async def wait_for( - self, - event: Literal['typing'], - /, - *, - check: Optional[Callable[[Messageable, Union[User, Member], datetime.datetime], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Messageable, Union[User, Member], datetime.datetime]: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_typing'], - /, - *, - check: Optional[Callable[[RawTypingEvent], bool]], - timeout: Optional[float] = None, - ) -> RawTypingEvent: - ... - - # Debug & Gateway events - - @overload - async def wait_for( - self, - event: Literal['connect', 'disconnect', 'ready', 'resumed'], - /, - *, - check: Optional[Callable[[], bool]], - timeout: Optional[float] = None, - ) -> None: - ... 
- - @overload - async def wait_for( - self, - event: Literal['shard_connect', 'shard_disconnect', 'shard_ready', 'shard_resumed'], - /, - *, - check: Optional[Callable[[int], bool]], - timeout: Optional[float] = None, - ) -> int: - ... - - @overload - async def wait_for( - self, - event: Literal['socket_event_type', 'socket_raw_receive'], - /, - *, - check: Optional[Callable[[str], bool]], - timeout: Optional[float] = None, - ) -> str: - ... - - @overload - async def wait_for( - self, - event: Literal['socket_raw_send'], - /, - *, - check: Optional[Callable[[Union[str, bytes]], bool]], - timeout: Optional[float] = None, - ) -> Union[str, bytes]: - ... - - # Guilds - - @overload - async def wait_for( - self, - event: Literal[ - 'guild_available', - 'guild_unavailable', - 'guild_join', - 'guild_remove', - ], - /, - *, - check: Optional[Callable[[Guild], bool]], - timeout: Optional[float] = None, - ) -> Guild: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_update'], - /, - *, - check: Optional[Callable[[Guild, Guild], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Guild, Guild]: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_emojis_update'], - /, - *, - check: Optional[Callable[[Guild, Sequence[Emoji], Sequence[Emoji]], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Guild, Sequence[Emoji], Sequence[Emoji]]: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_stickers_update'], - /, - *, - check: Optional[Callable[[Guild, Sequence[GuildSticker], Sequence[GuildSticker]], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Guild, Sequence[GuildSticker], Sequence[GuildSticker]]: - ... - - @overload - async def wait_for( - self, - event: Literal['invite_create', 'invite_delete'], - /, - *, - check: Optional[Callable[[Invite], bool]], - timeout: Optional[float] = None, - ) -> Invite: - ... - - @overload - async def wait_for( - self, - event: Literal['audit_log_entry_create'], - /, - *, - check: Optional[Callable[[AuditLogEntry], bool]], - timeout: Optional[float] = None, - ) -> AuditLogEntry: - ... - - # Integrations - - @overload - async def wait_for( - self, - event: Literal['integration_create', 'integration_update'], - /, - *, - check: Optional[Callable[[Integration], bool]], - timeout: Optional[float] = None, - ) -> Integration: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_integrations_update'], - /, - *, - check: Optional[Callable[[Guild], bool]], - timeout: Optional[float] = None, - ) -> Guild: - ... - - @overload - async def wait_for( - self, - event: Literal['webhooks_update'], - /, - *, - check: Optional[Callable[[GuildChannel], bool]], - timeout: Optional[float] = None, - ) -> GuildChannel: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_integration_delete'], - /, - *, - check: Optional[Callable[[RawIntegrationDeleteEvent], bool]], - timeout: Optional[float] = None, - ) -> RawIntegrationDeleteEvent: - ... - - # Interactions - - @overload - async def wait_for( - self, - event: Literal['interaction'], - /, - *, - check: Optional[Callable[[Interaction[Self]], bool]], - timeout: Optional[float] = None, - ) -> Interaction[Self]: - ... - - # Members - - @overload - async def wait_for( - self, - event: Literal['member_join', 'member_remove'], - /, - *, - check: Optional[Callable[[Member], bool]], - timeout: Optional[float] = None, - ) -> Member: - ... 
- - @overload - async def wait_for( - self, - event: Literal['raw_member_remove'], - /, - *, - check: Optional[Callable[[RawMemberRemoveEvent], bool]], - timeout: Optional[float] = None, - ) -> RawMemberRemoveEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['member_update', 'presence_update'], - /, - *, - check: Optional[Callable[[Member, Member], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Member, Member]: - ... - - @overload - async def wait_for( - self, - event: Literal['user_update'], - /, - *, - check: Optional[Callable[[User, User], bool]], - timeout: Optional[float] = None, - ) -> Tuple[User, User]: - ... - - @overload - async def wait_for( - self, - event: Literal['member_ban'], - /, - *, - check: Optional[Callable[[Guild, Union[User, Member]], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Guild, Union[User, Member]]: - ... - - @overload - async def wait_for( - self, - event: Literal['member_unban'], - /, - *, - check: Optional[Callable[[Guild, User], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Guild, User]: - ... - - # Messages - - @overload - async def wait_for( - self, - event: Literal['message', 'message_delete'], - /, - *, - check: Optional[Callable[[Message], bool]], - timeout: Optional[float] = None, - ) -> Message: - ... - - @overload - async def wait_for( - self, - event: Literal['message_edit'], - /, - *, - check: Optional[Callable[[Message, Message], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Message, Message]: - ... - - @overload - async def wait_for( - self, - event: Literal['bulk_message_delete'], - /, - *, - check: Optional[Callable[[List[Message]], bool]], - timeout: Optional[float] = None, - ) -> List[Message]: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_message_edit'], - /, - *, - check: Optional[Callable[[RawMessageUpdateEvent], bool]], - timeout: Optional[float] = None, - ) -> RawMessageUpdateEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_message_delete'], - /, - *, - check: Optional[Callable[[RawMessageDeleteEvent], bool]], - timeout: Optional[float] = None, - ) -> RawMessageDeleteEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_bulk_message_delete'], - /, - *, - check: Optional[Callable[[RawBulkMessageDeleteEvent], bool]], - timeout: Optional[float] = None, - ) -> RawBulkMessageDeleteEvent: - ... - - # Reactions - - @overload - async def wait_for( - self, - event: Literal['reaction_add', 'reaction_remove'], - /, - *, - check: Optional[Callable[[Reaction, Union[Member, User]], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Reaction, Union[Member, User]]: - ... - - @overload - async def wait_for( - self, - event: Literal['reaction_clear'], - /, - *, - check: Optional[Callable[[Message, List[Reaction]], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Message, List[Reaction]]: - ... - - @overload - async def wait_for( - self, - event: Literal['reaction_clear_emoji'], - /, - *, - check: Optional[Callable[[Reaction], bool]], - timeout: Optional[float] = None, - ) -> Reaction: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_reaction_add', 'raw_reaction_remove'], - /, - *, - check: Optional[Callable[[RawReactionActionEvent], bool]], - timeout: Optional[float] = None, - ) -> RawReactionActionEvent: - ... 
- - @overload - async def wait_for( - self, - event: Literal['raw_reaction_clear'], - /, - *, - check: Optional[Callable[[RawReactionClearEvent], bool]], - timeout: Optional[float] = None, - ) -> RawReactionClearEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_reaction_clear_emoji'], - /, - *, - check: Optional[Callable[[RawReactionClearEmojiEvent], bool]], - timeout: Optional[float] = None, - ) -> RawReactionClearEmojiEvent: - ... - - # Roles - - @overload - async def wait_for( - self, - event: Literal['guild_role_create', 'guild_role_delete'], - /, - *, - check: Optional[Callable[[Role], bool]], - timeout: Optional[float] = None, - ) -> Role: - ... - - @overload - async def wait_for( - self, - event: Literal['guild_role_update'], - /, - *, - check: Optional[Callable[[Role, Role], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Role, Role]: - ... - - # Scheduled Events - - @overload - async def wait_for( - self, - event: Literal['scheduled_event_create', 'scheduled_event_delete'], - /, - *, - check: Optional[Callable[[ScheduledEvent], bool]], - timeout: Optional[float] = None, - ) -> ScheduledEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['scheduled_event_user_add', 'scheduled_event_user_remove'], - /, - *, - check: Optional[Callable[[ScheduledEvent, User], bool]], - timeout: Optional[float] = None, - ) -> Tuple[ScheduledEvent, User]: - ... - - # Stages - - @overload - async def wait_for( - self, - event: Literal['stage_instance_create', 'stage_instance_delete'], - /, - *, - check: Optional[Callable[[StageInstance], bool]], - timeout: Optional[float] = None, - ) -> StageInstance: - ... - - @overload - async def wait_for( - self, - event: Literal['stage_instance_update'], - /, - *, - check: Optional[Callable[[StageInstance, StageInstance], bool]], - timeout: Optional[float] = None, - ) -> Coroutine[Any, Any, Tuple[StageInstance, StageInstance]]: - ... - - # Threads - @overload - async def wait_for( - self, - event: Literal['thread_create', 'thread_join', 'thread_remove', 'thread_delete'], - /, - *, - check: Optional[Callable[[Thread], bool]], - timeout: Optional[float] = None, - ) -> Thread: - ... - - @overload - async def wait_for( - self, - event: Literal['thread_update'], - /, - *, - check: Optional[Callable[[Thread, Thread], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Thread, Thread]: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_thread_update'], - /, - *, - check: Optional[Callable[[RawThreadUpdateEvent], bool]], - timeout: Optional[float] = None, - ) -> RawThreadUpdateEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_thread_delete'], - /, - *, - check: Optional[Callable[[RawThreadDeleteEvent], bool]], - timeout: Optional[float] = None, - ) -> RawThreadDeleteEvent: - ... - - @overload - async def wait_for( - self, - event: Literal['thread_member_join', 'thread_member_remove'], - /, - *, - check: Optional[Callable[[ThreadMember], bool]], - timeout: Optional[float] = None, - ) -> ThreadMember: - ... - - @overload - async def wait_for( - self, - event: Literal['raw_thread_member_remove'], - /, - *, - check: Optional[Callable[[RawThreadMembersUpdate], bool]], - timeout: Optional[float] = None, - ) -> RawThreadMembersUpdate: - ... 
- - # Voice - - @overload - async def wait_for( - self, - event: Literal['voice_state_update'], - /, - *, - check: Optional[Callable[[Member, VoiceState, VoiceState], bool]], - timeout: Optional[float] = None, - ) -> Tuple[Member, VoiceState, VoiceState]: - ... - - # Commands - - @overload - async def wait_for( - self: Union[Bot, AutoShardedBot], - event: Literal["command", "command_completion"], - /, - *, - check: Optional[Callable[[Context[Any]], bool]] = None, - timeout: Optional[float] = None, - ) -> Context[Any]: - ... - - @overload - async def wait_for( - self: Union[Bot, AutoShardedBot], - event: Literal["command_error"], - /, - *, - check: Optional[Callable[[Context[Any], CommandError], bool]] = None, - timeout: Optional[float] = None, - ) -> Tuple[Context[Any], CommandError]: - ... - - @overload - async def wait_for( - self, - event: str, - /, - *, - check: Optional[Callable[..., bool]] = None, - timeout: Optional[float] = None, - ) -> Any: - ... - - def wait_for( - self, - event: str, - /, - *, - check: Optional[Callable[..., bool]] = None, - timeout: Optional[float] = None, - ) -> Coro[Any]: - """|coro| - - Waits for a WebSocket event to be dispatched. - - This could be used to wait for a user to reply to a message, - or to react to a message, or to edit a message in a self-contained - way. - - The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default, - it does not timeout. Note that this does propagate the - :exc:`asyncio.TimeoutError` for you in case of timeout and is provided for - ease of use. - - In case the event returns multiple arguments, a :class:`tuple` containing those - arguments is returned instead. Please check the - :ref:`documentation ` for a list of events and their - parameters. - - This function returns the **first event that meets the requirements**. - - Examples - --------- - - Waiting for a user reply: :: - - @client.event - async def on_message(message): - if message.content.startswith('$greet'): - channel = message.channel - await channel.send('Say hello!') - - def check(m): - return m.content == 'hello' and m.channel == channel - - msg = await client.wait_for('message', check=check) - await channel.send(f'Hello {msg.author}!') - - Waiting for a thumbs up reaction from the message author: :: - - @client.event - async def on_message(message): - if message.content.startswith('$thumb'): - channel = message.channel - await channel.send('Send me that \N{THUMBS UP SIGN} reaction, mate') - - def check(reaction, user): - return user == message.author and str(reaction.emoji) == '\N{THUMBS UP SIGN}' - - try: - reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check) - except asyncio.TimeoutError: - await channel.send('\N{THUMBS DOWN SIGN}') - else: - await channel.send('\N{THUMBS UP SIGN}') - - .. versionchanged:: 2.0 - - ``event`` parameter is now positional-only. - - - Parameters - ------------ - event: :class:`str` - The event name, similar to the :ref:`event reference `, - but without the ``on_`` prefix, to wait for. - check: Optional[Callable[..., :class:`bool`]] - A predicate to check what to wait for. The arguments must meet the - parameters of the event being waited for. - timeout: Optional[:class:`float`] - The number of seconds to wait before timing out and raising - :exc:`asyncio.TimeoutError`. - - Raises - ------- - asyncio.TimeoutError - If a timeout is provided and it was reached. 
- - Returns - -------- - Any - Returns no arguments, a single argument, or a :class:`tuple` of multiple - arguments that mirrors the parameters passed in the - :ref:`event reference `. - """ - - future = self.loop.create_future() - if check is None: - - def _check(*args): - return True - - check = _check - - ev = event.lower() - try: - listeners = self._listeners[ev] - except KeyError: - listeners = [] - self._listeners[ev] = listeners - - listeners.append((future, check)) - return asyncio.wait_for(future, timeout) - - # event registration - - def event(self, coro: CoroT, /) -> CoroT: - """A decorator that registers an event to listen to. - - You can find more info about the events on the :ref:`documentation below `. - - The events must be a :ref:`coroutine `, if not, :exc:`TypeError` is raised. - - Example - --------- - - .. code-block:: python3 - - @client.event - async def on_ready(): - print('Ready!') - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. - - Raises - -------- - TypeError - The coroutine passed is not actually a coroutine. - """ - - if not asyncio.iscoroutinefunction(coro): - raise TypeError('event registered must be a coroutine function') - - setattr(self, coro.__name__, coro) - _log.debug('%s has successfully been registered as an event', coro.__name__) - return coro - - async def change_presence( - self, - *, - activity: Optional[BaseActivity] = None, - status: Optional[Status] = None, - ) -> None: - """|coro| - - Changes the client's presence. - - Example - --------- - - .. code-block:: python3 - - game = discord.Game("with the API") - await client.change_presence(status=discord.Status.idle, activity=game) - - .. versionchanged:: 2.0 - Removed the ``afk`` keyword-only parameter. - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` instead of - ``InvalidArgument``. - - Parameters - ---------- - activity: Optional[:class:`.BaseActivity`] - The activity being done. ``None`` if no currently active activity is done. - status: Optional[:class:`.Status`] - Indicates what status to change to. If ``None``, then - :attr:`.Status.online` is used. - - Raises - ------ - TypeError - If the ``activity`` parameter is not the proper type. - """ - - if status is None: - status_str = 'online' - status = Status.online - elif status is Status.offline: - status_str = 'invisible' - status = Status.offline - else: - status_str = str(status) - - await self.ws.change_presence(activity=activity, status=status_str) - - for guild in self._connection.guilds: - me = guild.me - if me is None: - continue - - if activity is not None: - me.activities = (activity,) # type: ignore # Type checker does not understand the downcast here - else: - me.activities = () - - me.status = status - - # Guild stuff - - async def fetch_guilds( - self, - *, - limit: Optional[int] = 200, - before: Optional[SnowflakeTime] = None, - after: Optional[SnowflakeTime] = None, - with_counts: bool = True, - ) -> AsyncIterator[Guild]: - """Retrieves an :term:`asynchronous iterator` that enables receiving your guilds. - - .. note:: - - Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`, - :attr:`.Guild.id`, :attr:`.Guild.name`, :attr:`.Guild.approximate_member_count`, - and :attr:`.Guild.approximate_presence_count` per :class:`.Guild`. - - .. note:: - - This method is an API call. For general usage, consider :attr:`guilds` instead. 
- - Examples - --------- - - Usage :: - - async for guild in client.fetch_guilds(limit=150): - print(guild.name) - - Flattening into a list :: - - guilds = [guild async for guild in client.fetch_guilds(limit=150)] - # guilds is now a list of Guild... - - All parameters are optional. - - Parameters - ----------- - limit: Optional[:class:`int`] - The number of guilds to retrieve. - If ``None``, it retrieves every guild you have access to. Note, however, - that this would make it a slow operation. - Defaults to ``200``. - - .. versionchanged:: 2.0 - - The default has been changed to 200. - - before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`] - Retrieves guilds before this date or object. - If a datetime is provided, it is recommended to use a UTC aware datetime. - If the datetime is naive, it is assumed to be local time. - after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`] - Retrieve guilds after this date or object. - If a datetime is provided, it is recommended to use a UTC aware datetime. - If the datetime is naive, it is assumed to be local time. - with_counts: :class:`bool` - Whether to include count information in the guilds. This fills the - :attr:`.Guild.approximate_member_count` and :attr:`.Guild.approximate_presence_count` - attributes without needing any privileged intents. Defaults to ``True``. - - .. versionadded:: 2.3 - - Raises - ------ - HTTPException - Getting the guilds failed. - - Yields - -------- - :class:`.Guild` - The guild with the guild data parsed. - """ - - async def _before_strategy(retrieve: int, before: Optional[Snowflake], limit: Optional[int]): - before_id = before.id if before else None - data = await self.http.get_guilds(retrieve, before=before_id, with_counts=with_counts) - - if data: - if limit is not None: - limit -= len(data) - - before = Object(id=int(data[0]['id'])) - - return data, before, limit - - async def _after_strategy(retrieve: int, after: Optional[Snowflake], limit: Optional[int]): - after_id = after.id if after else None - data = await self.http.get_guilds(retrieve, after=after_id, with_counts=with_counts) - - if data: - if limit is not None: - limit -= len(data) - - after = Object(id=int(data[-1]['id'])) - - return data, after, limit - - if isinstance(before, datetime.datetime): - before = Object(id=time_snowflake(before, high=False)) - if isinstance(after, datetime.datetime): - after = Object(id=time_snowflake(after, high=True)) - - predicate: Optional[Callable[[GuildPayload], bool]] = None - strategy, state = _after_strategy, after - - if before: - strategy, state = _before_strategy, before - - if before and after: - predicate = lambda m: int(m['id']) > after.id - - while True: - retrieve = 200 if limit is None else min(limit, 200) - if retrieve < 1: - return - - data, state, limit = await strategy(retrieve, state, limit) - - if predicate: - data = filter(predicate, data) - - count = 0 - - for count, raw_guild in enumerate(data, 1): - yield Guild(state=self._connection, data=raw_guild) - - if count < 200: - # There's no data left after this - break - - async def fetch_template(self, code: Union[Template, str]) -> Template: - """|coro| - - Gets a :class:`.Template` from a discord.new URL or code. - - Parameters - ----------- - code: Union[:class:`.Template`, :class:`str`] - The Discord Template Code or URL (must be a discord.new URL). - - Raises - ------- - NotFound - The template is invalid. - HTTPException - Getting the template failed. 
- - Returns - -------- - :class:`.Template` - The template from the URL/code. - """ - code = utils.resolve_template(code) - data = await self.http.get_template(code) - return Template(data=data, state=self._connection) - - async def fetch_guild(self, guild_id: int, /, *, with_counts: bool = True) -> Guild: - """|coro| - - Retrieves a :class:`.Guild` from an ID. - - .. note:: - - Using this, you will **not** receive :attr:`.Guild.channels`, :attr:`.Guild.members`, - :attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`. - - .. note:: - - This method is an API call. For general usage, consider :meth:`get_guild` instead. - - .. versionchanged:: 2.0 - - ``guild_id`` parameter is now positional-only. - - - Parameters - ----------- - guild_id: :class:`int` - The guild's ID to fetch from. - with_counts: :class:`bool` - Whether to include count information in the guild. This fills the - :attr:`.Guild.approximate_member_count` and :attr:`.Guild.approximate_presence_count` - attributes without needing any privileged intents. Defaults to ``True``. - - .. versionadded:: 2.0 - - Raises - ------ - Forbidden - You do not have access to the guild. - HTTPException - Getting the guild failed. - - Returns - -------- - :class:`.Guild` - The guild from the ID. - """ - data = await self.http.get_guild(guild_id, with_counts=with_counts) - return Guild(data=data, state=self._connection) - - async def create_guild( - self, - *, - name: str, - icon: bytes = MISSING, - code: str = MISSING, - ) -> Guild: - """|coro| - - Creates a :class:`.Guild`. - - Bot accounts in more than 10 guilds are not allowed to create guilds. - - .. versionchanged:: 2.0 - ``name`` and ``icon`` parameters are now keyword-only. The ``region`` parameter has been removed. - - .. versionchanged:: 2.0 - This function will now raise :exc:`ValueError` instead of - ``InvalidArgument``. - - Parameters - ---------- - name: :class:`str` - The name of the guild. - icon: Optional[:class:`bytes`] - The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit` - for more details on what is expected. - code: :class:`str` - The code for a template to create the guild with. - - .. versionadded:: 1.4 - - Raises - ------ - HTTPException - Guild creation failed. - ValueError - Invalid icon image format given. Must be PNG or JPG. - - Returns - ------- - :class:`.Guild` - The guild created. This is not the same guild that is - added to cache. - """ - if icon is not MISSING: - icon_base64 = utils._bytes_to_base64_data(icon) - else: - icon_base64 = None - - if code: - data = await self.http.create_from_template(code, name, icon_base64) - else: - data = await self.http.create_guild(name, icon_base64) - return Guild(data=data, state=self._connection) - - async def fetch_stage_instance(self, channel_id: int, /) -> StageInstance: - """|coro| - - Gets a :class:`.StageInstance` for a stage channel id. - - .. versionadded:: 2.0 - - Parameters - ----------- - channel_id: :class:`int` - The stage channel ID. - - Raises - ------- - NotFound - The stage instance or channel could not be found. - HTTPException - Getting the stage instance failed. - - Returns - -------- - :class:`.StageInstance` - The stage instance from the stage channel ID. - """ - data = await self.http.get_stage_instance(channel_id) - guild = self.get_guild(int(data['guild_id'])) - # Guild can technically be None here but this is being explicitly silenced right now. 
- return StageInstance(guild=guild, state=self._connection, data=data) # type: ignore - - # Invite management - - async def fetch_invite( - self, - url: Union[Invite, str], - *, - with_counts: bool = True, - with_expiration: bool = True, - scheduled_event_id: Optional[int] = None, - ) -> Invite: - """|coro| - - Gets an :class:`.Invite` from a discord.gg URL or ID. - - .. note:: - - If the invite is for a guild you have not joined, the guild and channel - attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` and - :class:`.PartialInviteChannel` respectively. - - Parameters - ----------- - url: Union[:class:`.Invite`, :class:`str`] - The Discord invite ID or URL (must be a discord.gg URL). - with_counts: :class:`bool` - Whether to include count information in the invite. This fills the - :attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count` - fields. - with_expiration: :class:`bool` - Whether to include the expiration date of the invite. This fills the - :attr:`.Invite.expires_at` field. - - .. versionadded:: 2.0 - scheduled_event_id: Optional[:class:`int`] - The ID of the scheduled event this invite is for. - - .. note:: - - It is not possible to provide a url that contains an ``event_id`` parameter - when using this parameter. - - .. versionadded:: 2.0 - - Raises - ------- - ValueError - The url contains an ``event_id``, but ``scheduled_event_id`` has also been provided. - NotFound - The invite has expired or is invalid. - HTTPException - Getting the invite failed. - - Returns - -------- - :class:`.Invite` - The invite from the URL/ID. - """ - - resolved = utils.resolve_invite(url) - - if scheduled_event_id and resolved.event: - raise ValueError('Cannot specify scheduled_event_id and contain an event_id in the url.') - - scheduled_event_id = scheduled_event_id or resolved.event - - data = await self.http.get_invite( - resolved.code, - with_counts=with_counts, - with_expiration=with_expiration, - guild_scheduled_event_id=scheduled_event_id, - ) - return Invite.from_incomplete(state=self._connection, data=data) - - async def delete_invite(self, invite: Union[Invite, str], /) -> None: - """|coro| - - Revokes an :class:`.Invite`, URL, or ID to an invite. - - You must have :attr:`~.Permissions.manage_channels` in - the associated guild to do this. - - .. versionchanged:: 2.0 - - ``invite`` parameter is now positional-only. - - Parameters - ---------- - invite: Union[:class:`.Invite`, :class:`str`] - The invite to revoke. - - Raises - ------- - Forbidden - You do not have permissions to revoke invites. - NotFound - The invite is invalid or expired. - HTTPException - Revoking the invite failed. - """ - - resolved = utils.resolve_invite(invite) - await self.http.delete_invite(resolved.code) - - # Miscellaneous stuff - - async def fetch_widget(self, guild_id: int, /) -> Widget: - """|coro| - - Gets a :class:`.Widget` from a guild ID. - - .. note:: - - The guild must have the widget enabled to get this information. - - .. versionchanged:: 2.0 - - ``guild_id`` parameter is now positional-only. - - Parameters - ----------- - guild_id: :class:`int` - The ID of the guild. - - Raises - ------- - Forbidden - The widget for this guild is disabled. - HTTPException - Retrieving the widget failed. - - Returns - -------- - :class:`.Widget` - The guild's widget. 
- """ - data = await self.http.get_widget(guild_id) - - return Widget(state=self._connection, data=data) - - async def application_info(self) -> AppInfo: - """|coro| - - Retrieves the bot's application information. - - Raises - ------- - HTTPException - Retrieving the information failed somehow. - - Returns - -------- - :class:`.AppInfo` - The bot's application information. - """ - data = await self.http.application_info() - return AppInfo(self._connection, data) - - async def fetch_user(self, user_id: int, /) -> User: - """|coro| - - Retrieves a :class:`~discord.User` based on their ID. - You do not have to share any guilds with the user to get this information, - however many operations do require that you do. - - .. note:: - - This method is an API call. If you have :attr:`discord.Intents.members` and member cache enabled, consider :meth:`get_user` instead. - - .. versionchanged:: 2.0 - - ``user_id`` parameter is now positional-only. - - Parameters - ----------- - user_id: :class:`int` - The user's ID to fetch from. - - Raises - ------- - NotFound - A user with this ID does not exist. - HTTPException - Fetching the user failed. - - Returns - -------- - :class:`~discord.User` - The user you requested. - """ - data = await self.http.get_user(user_id) - return User(state=self._connection, data=data) - - async def fetch_channel(self, channel_id: int, /) -> Union[GuildChannel, PrivateChannel, Thread]: - """|coro| - - Retrieves a :class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, or :class:`.Thread` with the specified ID. - - .. note:: - - This method is an API call. For general usage, consider :meth:`get_channel` instead. - - .. versionadded:: 1.2 - - .. versionchanged:: 2.0 - - ``channel_id`` parameter is now positional-only. - - Raises - ------- - InvalidData - An unknown channel type was received from Discord. - HTTPException - Retrieving the channel failed. - NotFound - Invalid Channel ID. - Forbidden - You do not have permission to fetch this channel. - - Returns - -------- - Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, :class:`.Thread`] - The channel from the ID. - """ - data = await self.http.get_channel(channel_id) - - factory, ch_type = _threaded_channel_factory(data['type']) - if factory is None: - raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data)) - - if ch_type in (ChannelType.group, ChannelType.private): - # the factory will be a DMChannel or GroupChannel here - channel = factory(me=self.user, data=data, state=self._connection) # type: ignore - else: - # the factory can't be a DMChannel or GroupChannel here - guild_id = int(data['guild_id']) # type: ignore - guild = self._connection._get_or_create_unavailable_guild(guild_id) - # the factory should be a GuildChannel or Thread - channel = factory(guild=guild, state=self._connection, data=data) # type: ignore - - return channel - - async def fetch_webhook(self, webhook_id: int, /) -> Webhook: - """|coro| - - Retrieves a :class:`.Webhook` with the specified ID. - - .. versionchanged:: 2.0 - - ``webhook_id`` parameter is now positional-only. - - Raises - -------- - HTTPException - Retrieving the webhook failed. - NotFound - Invalid webhook ID. - Forbidden - You do not have permission to fetch this webhook. - - Returns - --------- - :class:`.Webhook` - The webhook you requested. 
- """ - data = await self.http.get_webhook(webhook_id) - return Webhook.from_state(data, state=self._connection) - - async def fetch_sticker(self, sticker_id: int, /) -> Union[StandardSticker, GuildSticker]: - """|coro| - - Retrieves a :class:`.Sticker` with the specified ID. - - .. versionadded:: 2.0 - - Raises - -------- - HTTPException - Retrieving the sticker failed. - NotFound - Invalid sticker ID. - - Returns - -------- - Union[:class:`.StandardSticker`, :class:`.GuildSticker`] - The sticker you requested. - """ - data = await self.http.get_sticker(sticker_id) - cls, _ = _sticker_factory(data['type']) - # The type checker is not smart enough to figure out the constructor is correct - return cls(state=self._connection, data=data) # type: ignore - - async def fetch_premium_sticker_packs(self) -> List[StickerPack]: - """|coro| - - Retrieves all available premium sticker packs. - - .. versionadded:: 2.0 - - Raises - ------- - HTTPException - Retrieving the sticker packs failed. - - Returns - --------- - List[:class:`.StickerPack`] - All available premium sticker packs. - """ - data = await self.http.list_premium_sticker_packs() - return [StickerPack(state=self._connection, data=pack) for pack in data['sticker_packs']] - - async def create_dm(self, user: Snowflake) -> DMChannel: - """|coro| - - Creates a :class:`.DMChannel` with this user. - - This should be rarely called, as this is done transparently for most - people. - - .. versionadded:: 2.0 - - Parameters - ----------- - user: :class:`~discord.abc.Snowflake` - The user to create a DM with. - - Returns - ------- - :class:`.DMChannel` - The channel that was created. - """ - state = self._connection - found = state._get_private_channel_by_user(user.id) - if found: - return found - - data = await state.http.start_private_message(user.id) - return state.add_dm_channel(data) - - def add_view(self, view: View, *, message_id: Optional[int] = None) -> None: - """Registers a :class:`~discord.ui.View` for persistent listening. - - This method should be used for when a view is comprised of components - that last longer than the lifecycle of the program. - - .. versionadded:: 2.0 - - Parameters - ------------ - view: :class:`discord.ui.View` - The view to register for dispatching. - message_id: Optional[:class:`int`] - The message ID that the view is attached to. This is currently used to - refresh the view's state during message update events. If not given - then message update events are not propagated for the view. - - Raises - ------- - TypeError - A view was not passed. - ValueError - The view is not persistent or is already finished. A persistent view has no timeout - and all their components have an explicitly provided custom_id. - """ - - if not isinstance(view, View): - raise TypeError(f'expected an instance of View not {view.__class__.__name__}') - - if not view.is_persistent(): - raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout') - - if view.is_finished(): - raise ValueError('View is already finished.') - - self._connection.store_view(view, message_id) - - @property - def persistent_views(self) -> Sequence[View]: - """Sequence[:class:`.View`]: A sequence of persistent views added to the client. - - .. 
versionadded:: 2.0 - """ - return self._connection.persistent_views diff --git a/.venv/Lib/site-packages/discord/colour.py b/.venv/Lib/site-packages/discord/colour.py deleted file mode 100644 index e640f9d..0000000 --- a/.venv/Lib/site-packages/discord/colour.py +++ /dev/null @@ -1,523 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" -from __future__ import annotations - -import colorsys -import random -import re - -from typing import TYPE_CHECKING, Optional, Tuple, Union - -if TYPE_CHECKING: - from typing_extensions import Self - -__all__ = ( - 'Colour', - 'Color', -) - -RGB_REGEX = re.compile(r'rgb\s*\((?P[0-9.]+%?)\s*,\s*(?P[0-9.]+%?)\s*,\s*(?P[0-9.]+%?)\s*\)') - - -def parse_hex_number(argument: str) -> Colour: - arg = ''.join(i * 2 for i in argument) if len(argument) == 3 else argument - try: - value = int(arg, base=16) - if not (0 <= value <= 0xFFFFFF): - raise ValueError('hex number out of range for 24-bit colour') - except ValueError: - raise ValueError('invalid hex digit given') from None - else: - return Color(value=value) - - -def parse_rgb_number(number: str) -> int: - if number[-1] == '%': - value = float(number[:-1]) - if not (0 <= value <= 100): - raise ValueError('rgb percentage can only be between 0 to 100') - return round(255 * (value / 100)) - - value = int(number) - if not (0 <= value <= 255): - raise ValueError('rgb number can only be between 0 to 255') - return value - - -def parse_rgb(argument: str, *, regex: re.Pattern[str] = RGB_REGEX) -> Colour: - match = regex.match(argument) - if match is None: - raise ValueError('invalid rgb syntax found') - - red = parse_rgb_number(match.group('r')) - green = parse_rgb_number(match.group('g')) - blue = parse_rgb_number(match.group('b')) - return Color.from_rgb(red, green, blue) - - -class Colour: - """Represents a Discord role colour. This class is similar - to a (red, green, blue) :class:`tuple`. - - There is an alias for this called Color. - - .. container:: operations - - .. describe:: x == y - - Checks if two colours are equal. - - .. describe:: x != y - - Checks if two colours are not equal. - - .. describe:: hash(x) - - Return the colour's hash. - - .. describe:: str(x) - - Returns the hex format for the colour. - - .. describe:: int(x) - - Returns the raw colour value. - - .. note:: - - The colour values in the classmethods are mostly provided as-is and can change between - versions should the Discord client's representation of that colour also change. 
- - Attributes - ------------ - value: :class:`int` - The raw integer colour value. - """ - - __slots__ = ('value',) - - def __init__(self, value: int): - if not isinstance(value, int): - raise TypeError(f'Expected int parameter, received {value.__class__.__name__} instead.') - - self.value: int = value - - def _get_byte(self, byte: int) -> int: - return (self.value >> (8 * byte)) & 0xFF - - def __eq__(self, other: object) -> bool: - return isinstance(other, Colour) and self.value == other.value - - def __ne__(self, other: object) -> bool: - return not self.__eq__(other) - - def __str__(self) -> str: - return f'#{self.value:0>6x}' - - def __int__(self) -> int: - return self.value - - def __repr__(self) -> str: - return f'' - - def __hash__(self) -> int: - return hash(self.value) - - @property - def r(self) -> int: - """:class:`int`: Returns the red component of the colour.""" - return self._get_byte(2) - - @property - def g(self) -> int: - """:class:`int`: Returns the green component of the colour.""" - return self._get_byte(1) - - @property - def b(self) -> int: - """:class:`int`: Returns the blue component of the colour.""" - return self._get_byte(0) - - def to_rgb(self) -> Tuple[int, int, int]: - """Tuple[:class:`int`, :class:`int`, :class:`int`]: Returns an (r, g, b) tuple representing the colour.""" - return (self.r, self.g, self.b) - - @classmethod - def from_rgb(cls, r: int, g: int, b: int) -> Self: - """Constructs a :class:`Colour` from an RGB tuple.""" - return cls((r << 16) + (g << 8) + b) - - @classmethod - def from_hsv(cls, h: float, s: float, v: float) -> Self: - """Constructs a :class:`Colour` from an HSV tuple.""" - rgb = colorsys.hsv_to_rgb(h, s, v) - return cls.from_rgb(*(int(x * 255) for x in rgb)) - - @classmethod - def from_str(cls, value: str) -> Self: - """Constructs a :class:`Colour` from a string. - - The following formats are accepted: - - - ``0x`` - - ``#`` - - ``0x#`` - - ``rgb(, , )`` - - Like CSS, ```` can be either 0-255 or 0-100% and ```` can be - either a 6 digit hex number or a 3 digit hex shortcut (e.g. #FFF). - - .. versionadded:: 2.0 - - Raises - ------- - ValueError - The string could not be converted into a colour. - """ - - if value[0] == '#': - return parse_hex_number(value[1:]) - - if value[0:2] == '0x': - rest = value[2:] - # Legacy backwards compatible syntax - if rest.startswith('#'): - return parse_hex_number(rest[1:]) - return parse_hex_number(rest) - - arg = value.lower() - if arg[0:3] == 'rgb': - return parse_rgb(arg) - - raise ValueError('unknown colour format given') - - @classmethod - def default(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0``. - - .. colour:: #000000 - """ - return cls(0) - - @classmethod - def random(cls, *, seed: Optional[Union[int, str, float, bytes, bytearray]] = None) -> Self: - """A factory method that returns a :class:`Colour` with a random hue. - - .. note:: - - The random algorithm works by choosing a colour with a random hue but - with maxed out saturation and value. - - .. versionadded:: 1.6 - - Parameters - ------------ - seed: Optional[Union[:class:`int`, :class:`str`, :class:`float`, :class:`bytes`, :class:`bytearray`]] - The seed to initialize the RNG with. If ``None`` is passed the default RNG is used. - - .. versionadded:: 1.7 - """ - rand = random if seed is None else random.Random(seed) - return cls.from_hsv(rand.random(), 1, 1) - - @classmethod - def teal(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x1ABC9C``. - - .. 
colour:: #1ABC9C - """ - return cls(0x1ABC9C) - - @classmethod - def dark_teal(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x11806A``. - - .. colour:: #11806A - """ - return cls(0x11806A) - - @classmethod - def brand_green(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x57F287``. - - .. colour:: #57F287 - - - .. versionadded:: 2.0 - """ - return cls(0x57F287) - - @classmethod - def green(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x2ECC71``. - - .. colour:: #2ECC71 - """ - return cls(0x2ECC71) - - @classmethod - def dark_green(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x1F8B4C``. - - .. colour:: #1F8B4C - """ - return cls(0x1F8B4C) - - @classmethod - def blue(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x3498DB``. - - .. colour:: #3498DB - """ - return cls(0x3498DB) - - @classmethod - def dark_blue(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x206694``. - - .. colour:: #206694 - """ - return cls(0x206694) - - @classmethod - def purple(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x9B59B6``. - - .. colour:: #9B59B6 - """ - return cls(0x9B59B6) - - @classmethod - def dark_purple(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x71368A``. - - .. colour:: #71368A - """ - return cls(0x71368A) - - @classmethod - def magenta(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xE91E63``. - - .. colour:: #E91E63 - """ - return cls(0xE91E63) - - @classmethod - def dark_magenta(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xAD1457``. - - .. colour:: #AD1457 - """ - return cls(0xAD1457) - - @classmethod - def gold(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xF1C40F``. - - .. colour:: #F1C40F - """ - return cls(0xF1C40F) - - @classmethod - def dark_gold(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xC27C0E``. - - .. colour:: #C27C0E - """ - return cls(0xC27C0E) - - @classmethod - def orange(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xE67E22``. - - .. colour:: #E67E22 - """ - return cls(0xE67E22) - - @classmethod - def dark_orange(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xA84300``. - - .. colour:: #A84300 - """ - return cls(0xA84300) - - @classmethod - def brand_red(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xED4245``. - - .. colour:: #ED4245 - - .. versionadded:: 2.0 - """ - return cls(0xED4245) - - @classmethod - def red(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xE74C3C``. - - .. colour:: #E74C3C - """ - return cls(0xE74C3C) - - @classmethod - def dark_red(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x992D22``. - - .. colour:: #992D22 - """ - return cls(0x992D22) - - @classmethod - def lighter_grey(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x95A5A6``. - - .. colour:: #95A5A6 - """ - return cls(0x95A5A6) - - lighter_gray = lighter_grey - - @classmethod - def dark_grey(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x607d8b``. - - .. 
colour:: #607d8b - """ - return cls(0x607D8B) - - dark_gray = dark_grey - - @classmethod - def light_grey(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x979C9F``. - - .. colour:: #979C9F - """ - return cls(0x979C9F) - - light_gray = light_grey - - @classmethod - def darker_grey(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x546E7A``. - - .. colour:: #546E7A - """ - return cls(0x546E7A) - - darker_gray = darker_grey - - @classmethod - def og_blurple(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x7289DA``. - - .. colour:: #7289DA - """ - return cls(0x7289DA) - - @classmethod - def blurple(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x5865F2``. - - .. colour:: #5865F2 - """ - return cls(0x5865F2) - - @classmethod - def greyple(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x99AAB5``. - - .. colour:: #99AAB5 - """ - return cls(0x99AAB5) - - @classmethod - def dark_theme(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x313338``. - - This will appear transparent on Discord's dark theme. - - .. colour:: #313338 - - .. versionadded:: 1.5 - - .. versionchanged:: 2.2 - Updated colour from previous ``0x36393F`` to reflect discord theme changes. - """ - return cls(0x313338) - - @classmethod - def fuchsia(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xEB459E``. - - .. colour:: #EB459E - - .. versionadded:: 2.0 - """ - return cls(0xEB459E) - - @classmethod - def yellow(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xFEE75C``. - - .. colour:: #FEE75C - - .. versionadded:: 2.0 - """ - return cls(0xFEE75C) - - @classmethod - def dark_embed(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0x2B2D31``. - - .. colour:: #2B2D31 - - .. versionadded:: 2.2 - """ - return cls(0x2B2D31) - - @classmethod - def light_embed(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xEEEFF1``. - - .. colour:: #EEEFF1 - - .. versionadded:: 2.2 - """ - return cls(0xEEEFF1) - - @classmethod - def pink(cls) -> Self: - """A factory method that returns a :class:`Colour` with a value of ``0xEB459F``. - - .. colour:: #EB459F - - .. versionadded:: 2.3 - """ - return cls(0xEB459F) - - -Color = Colour diff --git a/.venv/Lib/site-packages/discord/components.py b/.venv/Lib/site-packages/discord/components.py deleted file mode 100644 index 5c3679b..0000000 --- a/.venv/Lib/site-packages/discord/components.py +++ /dev/null @@ -1,533 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -from typing import ClassVar, List, Literal, Optional, TYPE_CHECKING, Tuple, Union, overload -from .enums import try_enum, ComponentType, ButtonStyle, TextStyle, ChannelType -from .utils import get_slots, MISSING -from .partial_emoji import PartialEmoji, _EmojiTag - -if TYPE_CHECKING: - from typing_extensions import Self - - from .types.components import ( - Component as ComponentPayload, - ButtonComponent as ButtonComponentPayload, - SelectMenu as SelectMenuPayload, - SelectOption as SelectOptionPayload, - ActionRow as ActionRowPayload, - TextInput as TextInputPayload, - ActionRowChildComponent as ActionRowChildComponentPayload, - ) - from .emoji import Emoji - - ActionRowChildComponentType = Union['Button', 'SelectMenu', 'TextInput'] - - -__all__ = ( - 'Component', - 'ActionRow', - 'Button', - 'SelectMenu', - 'SelectOption', - 'TextInput', -) - - -class Component: - """Represents a Discord Bot UI Kit Component. - - Currently, the only components supported by Discord are: - - - :class:`ActionRow` - - :class:`Button` - - :class:`SelectMenu` - - :class:`TextInput` - - This class is abstract and cannot be instantiated. - - .. versionadded:: 2.0 - """ - - __slots__: Tuple[str, ...] = () - - __repr_info__: ClassVar[Tuple[str, ...]] - - def __repr__(self) -> str: - attrs = ' '.join(f'{key}={getattr(self, key)!r}' for key in self.__repr_info__) - return f'<{self.__class__.__name__} {attrs}>' - - @property - def type(self) -> ComponentType: - """:class:`ComponentType`: The type of component.""" - raise NotImplementedError - - @classmethod - def _raw_construct(cls, **kwargs) -> Self: - self = cls.__new__(cls) - for slot in get_slots(cls): - try: - value = kwargs[slot] - except KeyError: - pass - else: - setattr(self, slot, value) - return self - - def to_dict(self) -> ComponentPayload: - raise NotImplementedError - - -class ActionRow(Component): - """Represents a Discord Bot UI Kit Action Row. - - This is a component that holds up to 5 children components in a row. - - This inherits from :class:`Component`. - - .. versionadded:: 2.0 - - Attributes - ------------ - children: List[Union[:class:`Button`, :class:`SelectMenu`, :class:`TextInput`]] - The children components that this holds, if any. - """ - - __slots__: Tuple[str, ...] = ('children',) - - __repr_info__: ClassVar[Tuple[str, ...]] = __slots__ - - def __init__(self, data: ActionRowPayload, /) -> None: - self.children: List[ActionRowChildComponentType] = [] - - for component_data in data.get('components', []): - component = _component_factory(component_data) - - if component is not None: - self.children.append(component) - - @property - def type(self) -> Literal[ComponentType.action_row]: - """:class:`ComponentType`: The type of component.""" - return ComponentType.action_row - - def to_dict(self) -> ActionRowPayload: - return { - 'type': self.type.value, - 'components': [child.to_dict() for child in self.children], - } - - -class Button(Component): - """Represents a button from the Discord Bot UI Kit. 
- - This inherits from :class:`Component`. - - .. note:: - - The user constructible and usable type to create a button is :class:`discord.ui.Button` - not this one. - - .. versionadded:: 2.0 - - Attributes - ----------- - style: :class:`.ButtonStyle` - The style of the button. - custom_id: Optional[:class:`str`] - The ID of the button that gets received during an interaction. - If this button is for a URL, it does not have a custom ID. - url: Optional[:class:`str`] - The URL this button sends you to. - disabled: :class:`bool` - Whether the button is disabled or not. - label: Optional[:class:`str`] - The label of the button, if any. - emoji: Optional[:class:`PartialEmoji`] - The emoji of the button, if available. - """ - - __slots__: Tuple[str, ...] = ( - 'style', - 'custom_id', - 'url', - 'disabled', - 'label', - 'emoji', - ) - - __repr_info__: ClassVar[Tuple[str, ...]] = __slots__ - - def __init__(self, data: ButtonComponentPayload, /) -> None: - self.style: ButtonStyle = try_enum(ButtonStyle, data['style']) - self.custom_id: Optional[str] = data.get('custom_id') - self.url: Optional[str] = data.get('url') - self.disabled: bool = data.get('disabled', False) - self.label: Optional[str] = data.get('label') - self.emoji: Optional[PartialEmoji] - try: - self.emoji = PartialEmoji.from_dict(data['emoji']) - except KeyError: - self.emoji = None - - @property - def type(self) -> Literal[ComponentType.button]: - """:class:`ComponentType`: The type of component.""" - return ComponentType.button - - def to_dict(self) -> ButtonComponentPayload: - payload: ButtonComponentPayload = { - 'type': 2, - 'style': self.style.value, - 'disabled': self.disabled, - } - - if self.label: - payload['label'] = self.label - - if self.custom_id: - payload['custom_id'] = self.custom_id - - if self.url: - payload['url'] = self.url - - if self.emoji: - payload['emoji'] = self.emoji.to_dict() - - return payload - - -class SelectMenu(Component): - """Represents a select menu from the Discord Bot UI Kit. - - A select menu is functionally the same as a dropdown, however - on mobile it renders a bit differently. - - .. note:: - - The user constructible and usable type to create a select menu is - :class:`discord.ui.Select` not this one. - - .. versionadded:: 2.0 - - Attributes - ------------ - type: :class:`ComponentType` - The type of component. - custom_id: Optional[:class:`str`] - The ID of the select menu that gets received during an interaction. - placeholder: Optional[:class:`str`] - The placeholder text that is shown if nothing is selected, if any. - min_values: :class:`int` - The minimum number of items that must be chosen for this select menu. - Defaults to 1 and must be between 0 and 25. - max_values: :class:`int` - The maximum number of items that must be chosen for this select menu. - Defaults to 1 and must be between 1 and 25. - options: List[:class:`SelectOption`] - A list of options that can be selected in this menu. - disabled: :class:`bool` - Whether the select is disabled or not. - channel_types: List[:class:`.ChannelType`] - A list of channel types that are allowed to be chosen in this select menu. - """ - - __slots__: Tuple[str, ...] 
= ( - 'type', - 'custom_id', - 'placeholder', - 'min_values', - 'max_values', - 'options', - 'disabled', - 'channel_types', - ) - - __repr_info__: ClassVar[Tuple[str, ...]] = __slots__ - - def __init__(self, data: SelectMenuPayload, /) -> None: - self.type: ComponentType = try_enum(ComponentType, data['type']) - self.custom_id: str = data['custom_id'] - self.placeholder: Optional[str] = data.get('placeholder') - self.min_values: int = data.get('min_values', 1) - self.max_values: int = data.get('max_values', 1) - self.options: List[SelectOption] = [SelectOption.from_dict(option) for option in data.get('options', [])] - self.disabled: bool = data.get('disabled', False) - self.channel_types: List[ChannelType] = [try_enum(ChannelType, t) for t in data.get('channel_types', [])] - - def to_dict(self) -> SelectMenuPayload: - payload: SelectMenuPayload = { - 'type': self.type.value, - 'custom_id': self.custom_id, - 'min_values': self.min_values, - 'max_values': self.max_values, - 'disabled': self.disabled, - } - if self.placeholder: - payload['placeholder'] = self.placeholder - if self.options: - payload['options'] = [op.to_dict() for op in self.options] - if self.channel_types: - payload['channel_types'] = [t.value for t in self.channel_types] - - return payload - - -class SelectOption: - """Represents a select menu's option. - - These can be created by users. - - .. versionadded:: 2.0 - - Parameters - ----------- - label: :class:`str` - The label of the option. This is displayed to users. - Can only be up to 100 characters. - value: :class:`str` - The value of the option. This is not displayed to users. - If not provided when constructed then it defaults to the - label. Can only be up to 100 characters. - description: Optional[:class:`str`] - An additional description of the option, if any. - Can only be up to 100 characters. - emoji: Optional[Union[:class:`str`, :class:`Emoji`, :class:`PartialEmoji`]] - The emoji of the option, if available. - default: :class:`bool` - Whether this option is selected by default. - - Attributes - ----------- - label: :class:`str` - The label of the option. This is displayed to users. - Can only be up to 100 characters. - value: :class:`str` - The value of the option. This is not displayed to users. - If not provided when constructed then it defaults to the - label. Can only be up to 100 characters. - description: Optional[:class:`str`] - An additional description of the option, if any. - Can only be up to 100 characters. - default: :class:`bool` - Whether this option is selected by default. - """ - - __slots__: Tuple[str, ...] 
= ( - 'label', - 'value', - 'description', - '_emoji', - 'default', - ) - - def __init__( - self, - *, - label: str, - value: str = MISSING, - description: Optional[str] = None, - emoji: Optional[Union[str, Emoji, PartialEmoji]] = None, - default: bool = False, - ) -> None: - self.label: str = label - self.value: str = label if value is MISSING else value - self.description: Optional[str] = description - - self.emoji = emoji - self.default: bool = default - - def __repr__(self) -> str: - return ( - f'' - ) - - def __str__(self) -> str: - if self.emoji: - base = f'{self.emoji} {self.label}' - else: - base = self.label - - if self.description: - return f'{base}\n{self.description}' - return base - - @property - def emoji(self) -> Optional[PartialEmoji]: - """Optional[:class:`.PartialEmoji`]: The emoji of the option, if available.""" - return self._emoji - - @emoji.setter - def emoji(self, value: Optional[Union[str, Emoji, PartialEmoji]]) -> None: - if value is not None: - if isinstance(value, str): - self._emoji = PartialEmoji.from_str(value) - elif isinstance(value, _EmojiTag): - self._emoji = value._to_partial() - else: - raise TypeError(f'expected str, Emoji, or PartialEmoji, received {value.__class__.__name__} instead') - else: - self._emoji = None - - @classmethod - def from_dict(cls, data: SelectOptionPayload) -> SelectOption: - try: - emoji = PartialEmoji.from_dict(data['emoji']) - except KeyError: - emoji = None - - return cls( - label=data['label'], - value=data['value'], - description=data.get('description'), - emoji=emoji, - default=data.get('default', False), - ) - - def to_dict(self) -> SelectOptionPayload: - payload: SelectOptionPayload = { - 'label': self.label, - 'value': self.value, - 'default': self.default, - } - - if self.emoji: - payload['emoji'] = self.emoji.to_dict() - - if self.description: - payload['description'] = self.description - - return payload - - -class TextInput(Component): - """Represents a text input from the Discord Bot UI Kit. - - .. note:: - The user constructible and usable type to create a text input is - :class:`discord.ui.TextInput` not this one. - - .. versionadded:: 2.0 - - Attributes - ------------ - custom_id: Optional[:class:`str`] - The ID of the text input that gets received during an interaction. - label: :class:`str` - The label to display above the text input. - style: :class:`TextStyle` - The style of the text input. - placeholder: Optional[:class:`str`] - The placeholder text to display when the text input is empty. - value: Optional[:class:`str`] - The default value of the text input. - required: :class:`bool` - Whether the text input is required. - min_length: Optional[:class:`int`] - The minimum length of the text input. - max_length: Optional[:class:`int`] - The maximum length of the text input. - """ - - __slots__: Tuple[str, ...] 
= ( - 'style', - 'label', - 'custom_id', - 'placeholder', - 'value', - 'required', - 'min_length', - 'max_length', - ) - - __repr_info__: ClassVar[Tuple[str, ...]] = __slots__ - - def __init__(self, data: TextInputPayload, /) -> None: - self.style: TextStyle = try_enum(TextStyle, data['style']) - self.label: str = data['label'] - self.custom_id: str = data['custom_id'] - self.placeholder: Optional[str] = data.get('placeholder') - self.value: Optional[str] = data.get('value') - self.required: bool = data.get('required', True) - self.min_length: Optional[int] = data.get('min_length') - self.max_length: Optional[int] = data.get('max_length') - - @property - def type(self) -> Literal[ComponentType.text_input]: - """:class:`ComponentType`: The type of component.""" - return ComponentType.text_input - - def to_dict(self) -> TextInputPayload: - payload: TextInputPayload = { - 'type': self.type.value, - 'style': self.style.value, - 'label': self.label, - 'custom_id': self.custom_id, - 'required': self.required, - } - - if self.placeholder: - payload['placeholder'] = self.placeholder - - if self.value: - payload['value'] = self.value - - if self.min_length: - payload['min_length'] = self.min_length - - if self.max_length: - payload['max_length'] = self.max_length - - return payload - - @property - def default(self) -> Optional[str]: - """Optional[:class:`str`]: The default value of the text input. - - This is an alias to :attr:`value`. - """ - return self.value - - -@overload -def _component_factory(data: ActionRowChildComponentPayload) -> Optional[ActionRowChildComponentType]: - ... - - -@overload -def _component_factory(data: ComponentPayload) -> Optional[Union[ActionRow, ActionRowChildComponentType]]: - ... - - -def _component_factory(data: ComponentPayload) -> Optional[Union[ActionRow, ActionRowChildComponentType]]: - if data['type'] == 1: - return ActionRow(data) - elif data['type'] == 2: - return Button(data) - elif data['type'] == 4: - return TextInput(data) - elif data['type'] in (3, 5, 6, 7, 8): - return SelectMenu(data) diff --git a/.venv/Lib/site-packages/discord/context_managers.py b/.venv/Lib/site-packages/discord/context_managers.py deleted file mode 100644 index 09803c9..0000000 --- a/.venv/Lib/site-packages/discord/context_managers.py +++ /dev/null @@ -1,92 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations - -import asyncio -from typing import TYPE_CHECKING, Generator, Optional, Type, TypeVar - -if TYPE_CHECKING: - from .abc import Messageable, MessageableChannel - - from types import TracebackType - - BE = TypeVar('BE', bound=BaseException) - -# fmt: off -__all__ = ( - 'Typing', -) -# fmt: on - - -def _typing_done_callback(fut: asyncio.Future) -> None: - # just retrieve any exception and call it a day - try: - fut.exception() - except (asyncio.CancelledError, Exception): - pass - - -class Typing: - def __init__(self, messageable: Messageable) -> None: - self.loop: asyncio.AbstractEventLoop = messageable._state.loop - self.messageable: Messageable = messageable - self.channel: Optional[MessageableChannel] = None - - async def _get_channel(self) -> MessageableChannel: - if self.channel: - return self.channel - - self.channel = channel = await self.messageable._get_channel() - return channel - - async def wrapped_typer(self) -> None: - channel = await self._get_channel() - await channel._state.http.send_typing(channel.id) - - def __await__(self) -> Generator[None, None, None]: - return self.wrapped_typer().__await__() - - async def do_typing(self) -> None: - channel = await self._get_channel() - typing = channel._state.http.send_typing - - while True: - await asyncio.sleep(5) - await typing(channel.id) - - async def __aenter__(self) -> None: - channel = await self._get_channel() - await channel._state.http.send_typing(channel.id) - self.task: asyncio.Task[None] = self.loop.create_task(self.do_typing()) - self.task.add_done_callback(_typing_done_callback) - - async def __aexit__( - self, - exc_type: Optional[Type[BE]], - exc: Optional[BE], - traceback: Optional[TracebackType], - ) -> None: - self.task.cancel() diff --git a/.venv/Lib/site-packages/discord/embeds.py b/.venv/Lib/site-packages/discord/embeds.py deleted file mode 100644 index 6a79fef..0000000 --- a/.venv/Lib/site-packages/discord/embeds.py +++ /dev/null @@ -1,757 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - -import datetime -from typing import Any, Dict, List, Mapping, Optional, Protocol, TYPE_CHECKING, TypeVar, Union - -from . 
import utils -from .colour import Colour - -# fmt: off -__all__ = ( - 'Embed', -) -# fmt: on - - -class EmbedProxy: - def __init__(self, layer: Dict[str, Any]): - self.__dict__.update(layer) - - def __len__(self) -> int: - return len(self.__dict__) - - def __repr__(self) -> str: - inner = ', '.join((f'{k}={v!r}' for k, v in self.__dict__.items() if not k.startswith('_'))) - return f'EmbedProxy({inner})' - - def __getattr__(self, attr: str) -> None: - return None - - def __eq__(self, other: object) -> bool: - return isinstance(other, EmbedProxy) and self.__dict__ == other.__dict__ - - -if TYPE_CHECKING: - from typing_extensions import Self - - from .types.embed import Embed as EmbedData, EmbedType - - T = TypeVar('T') - - class _EmbedFooterProxy(Protocol): - text: Optional[str] - icon_url: Optional[str] - - class _EmbedFieldProxy(Protocol): - name: Optional[str] - value: Optional[str] - inline: bool - - class _EmbedMediaProxy(Protocol): - url: Optional[str] - proxy_url: Optional[str] - height: Optional[int] - width: Optional[int] - - class _EmbedVideoProxy(Protocol): - url: Optional[str] - height: Optional[int] - width: Optional[int] - - class _EmbedProviderProxy(Protocol): - name: Optional[str] - url: Optional[str] - - class _EmbedAuthorProxy(Protocol): - name: Optional[str] - url: Optional[str] - icon_url: Optional[str] - proxy_icon_url: Optional[str] - - -class Embed: - """Represents a Discord embed. - - .. container:: operations - - .. describe:: len(x) - - Returns the total size of the embed. - Useful for checking if it's within the 6000 character limit. - - .. describe:: bool(b) - - Returns whether the embed has any data set. - - .. versionadded:: 2.0 - - .. describe:: x == y - - Checks if two embeds are equal. - - .. versionadded:: 2.0 - - For ease of use, all parameters that expect a :class:`str` are implicitly - casted to :class:`str` for you. - - .. versionchanged:: 2.0 - ``Embed.Empty`` has been removed in favour of ``None``. - - Attributes - ----------- - title: Optional[:class:`str`] - The title of the embed. - This can be set during initialisation. - Can only be up to 256 characters. - type: :class:`str` - The type of embed. Usually "rich". - This can be set during initialisation. - Possible strings for embed types can be found on discord's - :ddocs:`api docs ` - description: Optional[:class:`str`] - The description of the embed. - This can be set during initialisation. - Can only be up to 4096 characters. - url: Optional[:class:`str`] - The URL of the embed. - This can be set during initialisation. - timestamp: Optional[:class:`datetime.datetime`] - The timestamp of the embed content. This is an aware datetime. - If a naive datetime is passed, it is converted to an aware - datetime with the local timezone. - colour: Optional[Union[:class:`Colour`, :class:`int`]] - The colour code of the embed. Aliased to ``color`` as well. - This can be set during initialisation. 
- """ - - __slots__ = ( - 'title', - 'url', - 'type', - '_timestamp', - '_colour', - '_footer', - '_image', - '_thumbnail', - '_video', - '_provider', - '_author', - '_fields', - 'description', - ) - - def __init__( - self, - *, - colour: Optional[Union[int, Colour]] = None, - color: Optional[Union[int, Colour]] = None, - title: Optional[Any] = None, - type: EmbedType = 'rich', - url: Optional[Any] = None, - description: Optional[Any] = None, - timestamp: Optional[datetime.datetime] = None, - ): - - self.colour = colour if colour is not None else color - self.title: Optional[str] = title - self.type: EmbedType = type - self.url: Optional[str] = url - self.description: Optional[str] = description - - if self.title is not None: - self.title = str(self.title) - - if self.description is not None: - self.description = str(self.description) - - if self.url is not None: - self.url = str(self.url) - - if timestamp is not None: - self.timestamp = timestamp - - @classmethod - def from_dict(cls, data: Mapping[str, Any]) -> Self: - """Converts a :class:`dict` to a :class:`Embed` provided it is in the - format that Discord expects it to be in. - - You can find out about this format in the :ddocs:`official Discord documentation `. - - Parameters - ----------- - data: :class:`dict` - The dictionary to convert into an embed. - """ - # we are bypassing __init__ here since it doesn't apply here - self = cls.__new__(cls) - - # fill in the basic fields - - self.title = data.get('title', None) - self.type = data.get('type', None) - self.description = data.get('description', None) - self.url = data.get('url', None) - - if self.title is not None: - self.title = str(self.title) - - if self.description is not None: - self.description = str(self.description) - - if self.url is not None: - self.url = str(self.url) - - # try to fill in the more rich fields - - try: - self._colour = Colour(value=data['color']) - except KeyError: - pass - - try: - self._timestamp = utils.parse_time(data['timestamp']) - except KeyError: - pass - - for attr in ('thumbnail', 'video', 'provider', 'author', 'fields', 'image', 'footer'): - try: - value = data[attr] - except KeyError: - continue - else: - setattr(self, '_' + attr, value) - - return self - - def copy(self) -> Self: - """Returns a shallow copy of the embed.""" - return self.__class__.from_dict(self.to_dict()) - - def __len__(self) -> int: - total = len(self.title or '') + len(self.description or '') - for field in getattr(self, '_fields', []): - total += len(field['name']) + len(field['value']) - - try: - footer_text = self._footer['text'] - except (AttributeError, KeyError): - pass - else: - total += len(footer_text) - - try: - author = self._author - except AttributeError: - pass - else: - total += len(author['name']) - - return total - - def __bool__(self) -> bool: - return any( - ( - self.title, - self.url, - self.description, - self.colour, - self.fields, - self.timestamp, - self.author, - self.thumbnail, - self.footer, - self.image, - self.provider, - self.video, - ) - ) - - def __eq__(self, other: Embed) -> bool: - return isinstance(other, Embed) and ( - self.type == other.type - and self.title == other.title - and self.url == other.url - and self.description == other.description - and self.colour == other.colour - and self.fields == other.fields - and self.timestamp == other.timestamp - and self.author == other.author - and self.thumbnail == other.thumbnail - and self.footer == other.footer - and self.image == other.image - and self.provider == other.provider - and 
self.video == other.video - ) - - @property - def colour(self) -> Optional[Colour]: - return getattr(self, '_colour', None) - - @colour.setter - def colour(self, value: Optional[Union[int, Colour]]) -> None: - if value is None: - self._colour = None - elif isinstance(value, Colour): - self._colour = value - elif isinstance(value, int): - self._colour = Colour(value=value) - else: - raise TypeError(f'Expected discord.Colour, int, or None but received {value.__class__.__name__} instead.') - - color = colour - - @property - def timestamp(self) -> Optional[datetime.datetime]: - return getattr(self, '_timestamp', None) - - @timestamp.setter - def timestamp(self, value: Optional[datetime.datetime]) -> None: - if isinstance(value, datetime.datetime): - if value.tzinfo is None: - value = value.astimezone() - self._timestamp = value - elif value is None: - self._timestamp = None - else: - raise TypeError(f"Expected datetime.datetime or None received {value.__class__.__name__} instead") - - @property - def footer(self) -> _EmbedFooterProxy: - """Returns an ``EmbedProxy`` denoting the footer contents. - - See :meth:`set_footer` for possible values you can access. - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. - return EmbedProxy(getattr(self, '_footer', {})) # type: ignore - - def set_footer(self, *, text: Optional[Any] = None, icon_url: Optional[Any] = None) -> Self: - """Sets the footer for the embed content. - - This function returns the class instance to allow for fluent-style - chaining. - - Parameters - ----------- - text: :class:`str` - The footer text. Can only be up to 2048 characters. - icon_url: :class:`str` - The URL of the footer icon. Only HTTP(S) is supported. - Inline attachment URLs are also supported, see :ref:`local_image`. - """ - - self._footer = {} - if text is not None: - self._footer['text'] = str(text) - - if icon_url is not None: - self._footer['icon_url'] = str(icon_url) - - return self - - def remove_footer(self) -> Self: - """Clears embed's footer information. - - This function returns the class instance to allow for fluent-style - chaining. - - .. versionadded:: 2.0 - """ - try: - del self._footer - except AttributeError: - pass - - return self - - @property - def image(self) -> _EmbedMediaProxy: - """Returns an ``EmbedProxy`` denoting the image contents. - - Possible attributes you can access are: - - - ``url`` - - ``proxy_url`` - - ``width`` - - ``height`` - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. - return EmbedProxy(getattr(self, '_image', {})) # type: ignore - - def set_image(self, *, url: Optional[Any]) -> Self: - """Sets the image for the embed content. - - This function returns the class instance to allow for fluent-style - chaining. - - Parameters - ----------- - url: :class:`str` - The source URL for the image. Only HTTP(S) is supported. - Inline attachment URLs are also supported, see :ref:`local_image`. - """ - - if url is None: - try: - del self._image - except AttributeError: - pass - else: - self._image = { - 'url': str(url), - } - - return self - - @property - def thumbnail(self) -> _EmbedMediaProxy: - """Returns an ``EmbedProxy`` denoting the thumbnail contents. - - Possible attributes you can access are: - - - ``url`` - - ``proxy_url`` - - ``width`` - - ``height`` - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. 
- return EmbedProxy(getattr(self, '_thumbnail', {})) # type: ignore - - def set_thumbnail(self, *, url: Optional[Any]) -> Self: - """Sets the thumbnail for the embed content. - - This function returns the class instance to allow for fluent-style - chaining. - - .. versionchanged:: 1.4 - Passing ``None`` removes the thumbnail. - - Parameters - ----------- - url: :class:`str` - The source URL for the thumbnail. Only HTTP(S) is supported. - Inline attachment URLs are also supported, see :ref:`local_image`. - """ - - if url is None: - try: - del self._thumbnail - except AttributeError: - pass - else: - self._thumbnail = { - 'url': str(url), - } - - return self - - @property - def video(self) -> _EmbedVideoProxy: - """Returns an ``EmbedProxy`` denoting the video contents. - - Possible attributes include: - - - ``url`` for the video URL. - - ``height`` for the video height. - - ``width`` for the video width. - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. - return EmbedProxy(getattr(self, '_video', {})) # type: ignore - - @property - def provider(self) -> _EmbedProviderProxy: - """Returns an ``EmbedProxy`` denoting the provider contents. - - The only attributes that might be accessed are ``name`` and ``url``. - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. - return EmbedProxy(getattr(self, '_provider', {})) # type: ignore - - @property - def author(self) -> _EmbedAuthorProxy: - """Returns an ``EmbedProxy`` denoting the author contents. - - See :meth:`set_author` for possible values you can access. - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. - return EmbedProxy(getattr(self, '_author', {})) # type: ignore - - def set_author(self, *, name: Any, url: Optional[Any] = None, icon_url: Optional[Any] = None) -> Self: - """Sets the author for the embed content. - - This function returns the class instance to allow for fluent-style - chaining. - - Parameters - ----------- - name: :class:`str` - The name of the author. Can only be up to 256 characters. - url: :class:`str` - The URL for the author. - icon_url: :class:`str` - The URL of the author icon. Only HTTP(S) is supported. - Inline attachment URLs are also supported, see :ref:`local_image`. - """ - - self._author = { - 'name': str(name), - } - - if url is not None: - self._author['url'] = str(url) - - if icon_url is not None: - self._author['icon_url'] = str(icon_url) - - return self - - def remove_author(self) -> Self: - """Clears embed's author information. - - This function returns the class instance to allow for fluent-style - chaining. - - .. versionadded:: 1.4 - """ - try: - del self._author - except AttributeError: - pass - - return self - - @property - def fields(self) -> List[_EmbedFieldProxy]: - """List[``EmbedProxy``]: Returns a :class:`list` of ``EmbedProxy`` denoting the field contents. - - See :meth:`add_field` for possible values you can access. - - If the attribute has no value then ``None`` is returned. - """ - # Lying to the type checker for better developer UX. - return [EmbedProxy(d) for d in getattr(self, '_fields', [])] # type: ignore - - def add_field(self, *, name: Any, value: Any, inline: bool = True) -> Self: - """Adds a field to the embed object. - - This function returns the class instance to allow for fluent-style - chaining. Can only be up to 25 fields. 
- - Parameters - ----------- - name: :class:`str` - The name of the field. Can only be up to 256 characters. - value: :class:`str` - The value of the field. Can only be up to 1024 characters. - inline: :class:`bool` - Whether the field should be displayed inline. - """ - - field = { - 'inline': inline, - 'name': str(name), - 'value': str(value), - } - - try: - self._fields.append(field) - except AttributeError: - self._fields = [field] - - return self - - def insert_field_at(self, index: int, *, name: Any, value: Any, inline: bool = True) -> Self: - """Inserts a field before a specified index to the embed. - - This function returns the class instance to allow for fluent-style - chaining. Can only be up to 25 fields. - - .. versionadded:: 1.2 - - Parameters - ----------- - index: :class:`int` - The index of where to insert the field. - name: :class:`str` - The name of the field. Can only be up to 256 characters. - value: :class:`str` - The value of the field. Can only be up to 1024 characters. - inline: :class:`bool` - Whether the field should be displayed inline. - """ - - field = { - 'inline': inline, - 'name': str(name), - 'value': str(value), - } - - try: - self._fields.insert(index, field) - except AttributeError: - self._fields = [field] - - return self - - def clear_fields(self) -> Self: - """Removes all fields from this embed. - - This function returns the class instance to allow for fluent-style - chaining. - - .. versionchanged:: 2.0 - This function now returns the class instance. - """ - try: - self._fields.clear() - except AttributeError: - self._fields = [] - - return self - - def remove_field(self, index: int) -> Self: - """Removes a field at a specified index. - - If the index is invalid or out of bounds then the error is - silently swallowed. - - This function returns the class instance to allow for fluent-style - chaining. - - .. note:: - - When deleting a field by index, the index of the other fields - shift to fill the gap just like a regular list. - - .. versionchanged:: 2.0 - This function now returns the class instance. - - Parameters - ----------- - index: :class:`int` - The index of the field to remove. - """ - try: - del self._fields[index] - except (AttributeError, IndexError): - pass - - return self - - def set_field_at(self, index: int, *, name: Any, value: Any, inline: bool = True) -> Self: - """Modifies a field to the embed object. - - The index must point to a valid pre-existing field. Can only be up to 25 fields. - - This function returns the class instance to allow for fluent-style - chaining. - - Parameters - ----------- - index: :class:`int` - The index of the field to modify. - name: :class:`str` - The name of the field. Can only be up to 256 characters. - value: :class:`str` - The value of the field. Can only be up to 1024 characters. - inline: :class:`bool` - Whether the field should be displayed inline. - - Raises - ------- - IndexError - An invalid index was provided. 
- """ - - try: - field = self._fields[index] - except (TypeError, IndexError, AttributeError): - raise IndexError('field index out of range') - - field['name'] = str(name) - field['value'] = str(value) - field['inline'] = inline - return self - - def to_dict(self) -> EmbedData: - """Converts this embed object into a dict.""" - - # add in the raw data into the dict - # fmt: off - result = { - key[1:]: getattr(self, key) - for key in self.__slots__ - if key[0] == '_' and hasattr(self, key) - } - # fmt: on - - # deal with basic convenience wrappers - - try: - colour = result.pop('colour') - except KeyError: - pass - else: - if colour: - result['color'] = colour.value - - try: - timestamp = result.pop('timestamp') - except KeyError: - pass - else: - if timestamp: - if timestamp.tzinfo: - result['timestamp'] = timestamp.astimezone(tz=datetime.timezone.utc).isoformat() - else: - result['timestamp'] = timestamp.replace(tzinfo=datetime.timezone.utc).isoformat() - - # add in the non raw attribute ones - if self.type: - result['type'] = self.type - - if self.description: - result['description'] = self.description - - if self.url: - result['url'] = self.url - - if self.title: - result['title'] = self.title - - return result # type: ignore # This payload is equivalent to the EmbedData type diff --git a/.venv/Lib/site-packages/discord/emoji.py b/.venv/Lib/site-packages/discord/emoji.py deleted file mode 100644 index 045486d..0000000 --- a/.venv/Lib/site-packages/discord/emoji.py +++ /dev/null @@ -1,257 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -from typing import Any, Collection, Iterator, List, Optional, TYPE_CHECKING, Tuple - -from .asset import Asset, AssetMixin -from .utils import SnowflakeList, snowflake_time, MISSING -from .partial_emoji import _EmojiTag, PartialEmoji -from .user import User - -# fmt: off -__all__ = ( - 'Emoji', -) -# fmt: on - -if TYPE_CHECKING: - from .types.emoji import Emoji as EmojiPayload - from .guild import Guild - from .state import ConnectionState - from .abc import Snowflake - from .role import Role - from datetime import datetime - - -class Emoji(_EmojiTag, AssetMixin): - """Represents a custom emoji. - - Depending on the way this object was created, some of the attributes can - have a value of ``None``. - - .. container:: operations - - .. describe:: x == y - - Checks if two emoji are the same. - - .. describe:: x != y - - Checks if two emoji are not the same. - - .. 
describe:: hash(x) - - Return the emoji's hash. - - .. describe:: iter(x) - - Returns an iterator of ``(field, value)`` pairs. This allows this class - to be used as an iterable in list/dict/etc constructions. - - .. describe:: str(x) - - Returns the emoji rendered for discord. - - Attributes - ----------- - name: :class:`str` - The name of the emoji. - id: :class:`int` - The emoji's ID. - require_colons: :class:`bool` - If colons are required to use this emoji in the client (:PJSalt: vs PJSalt). - animated: :class:`bool` - Whether an emoji is animated or not. - managed: :class:`bool` - If this emoji is managed by a Twitch integration. - guild_id: :class:`int` - The guild ID the emoji belongs to. - available: :class:`bool` - Whether the emoji is available for use. - user: Optional[:class:`User`] - The user that created the emoji. This can only be retrieved using :meth:`Guild.fetch_emoji` and - having :attr:`~Permissions.manage_emojis`. - """ - - __slots__: Tuple[str, ...] = ( - 'require_colons', - 'animated', - 'managed', - 'id', - 'name', - '_roles', - 'guild_id', - '_state', - 'user', - 'available', - ) - - def __init__(self, *, guild: Guild, state: ConnectionState, data: EmojiPayload) -> None: - self.guild_id: int = guild.id - self._state: ConnectionState = state - self._from_data(data) - - def _from_data(self, emoji: EmojiPayload) -> None: - self.require_colons: bool = emoji.get('require_colons', False) - self.managed: bool = emoji.get('managed', False) - self.id: int = int(emoji['id']) # type: ignore # This won't be None for full emoji objects. - self.name: str = emoji['name'] # type: ignore # This won't be None for full emoji objects. - self.animated: bool = emoji.get('animated', False) - self.available: bool = emoji.get('available', True) - self._roles: SnowflakeList = SnowflakeList(map(int, emoji.get('roles', []))) - user = emoji.get('user') - self.user: Optional[User] = User(state=self._state, data=user) if user else None - - def _to_partial(self) -> PartialEmoji: - return PartialEmoji(name=self.name, animated=self.animated, id=self.id) - - def __iter__(self) -> Iterator[Tuple[str, Any]]: - for attr in self.__slots__: - if attr[0] != '_': - value = getattr(self, attr, None) - if value is not None: - yield (attr, value) - - def __str__(self) -> str: - if self.animated: - return f'' - return f'<:{self.name}:{self.id}>' - - def __repr__(self) -> str: - return f'' - - def __eq__(self, other: object) -> bool: - return isinstance(other, _EmojiTag) and self.id == other.id - - def __ne__(self, other: object) -> bool: - return not self.__eq__(other) - - def __hash__(self) -> int: - return self.id >> 22 - - @property - def created_at(self) -> datetime: - """:class:`datetime.datetime`: Returns the emoji's creation time in UTC.""" - return snowflake_time(self.id) - - @property - def url(self) -> str: - """:class:`str`: Returns the URL of the emoji.""" - fmt = 'gif' if self.animated else 'png' - return f'{Asset.BASE}/emojis/{self.id}.{fmt}' - - @property - def roles(self) -> List[Role]: - """List[:class:`Role`]: A :class:`list` of roles that is allowed to use this emoji. - - If roles is empty, the emoji is unrestricted. - """ - guild = self.guild - if guild is None: - return [] - - return [role for role in guild.roles if self._roles.has(role.id)] - - @property - def guild(self) -> Optional[Guild]: - """:class:`Guild`: The guild this emoji belongs to.""" - return self._state._get_guild(self.guild_id) - - def is_usable(self) -> bool: - """:class:`bool`: Whether the bot can use this emoji. - - .. 
versionadded:: 1.3 - """ - if not self.available or not self.guild or self.guild.unavailable: - return False - if not self._roles: - return True - emoji_roles, my_roles = self._roles, self.guild.me._roles - return any(my_roles.has(role_id) for role_id in emoji_roles) - - async def delete(self, *, reason: Optional[str] = None) -> None: - """|coro| - - Deletes the custom emoji. - - You must have :attr:`~Permissions.manage_emojis` to do this. - - Parameters - ----------- - reason: Optional[:class:`str`] - The reason for deleting this emoji. Shows up on the audit log. - - Raises - ------- - Forbidden - You are not allowed to delete emojis. - HTTPException - An error occurred deleting the emoji. - """ - - await self._state.http.delete_custom_emoji(self.guild_id, self.id, reason=reason) - - async def edit( - self, *, name: str = MISSING, roles: Collection[Snowflake] = MISSING, reason: Optional[str] = None - ) -> Emoji: - r"""|coro| - - Edits the custom emoji. - - You must have :attr:`~Permissions.manage_emojis` to do this. - - .. versionchanged:: 2.0 - The newly updated emoji is returned. - - Parameters - ----------- - name: :class:`str` - The new emoji name. - roles: List[:class:`~discord.abc.Snowflake`] - A list of roles that can use this emoji. An empty list can be passed to make it available to everyone. - reason: Optional[:class:`str`] - The reason for editing this emoji. Shows up on the audit log. - - Raises - ------- - Forbidden - You are not allowed to edit emojis. - HTTPException - An error occurred editing the emoji. - - Returns - -------- - :class:`Emoji` - The newly updated emoji. - """ - - payload = {} - if name is not MISSING: - payload['name'] = name - if roles is not MISSING: - payload['roles'] = [role.id for role in roles] - - data = await self._state.http.edit_custom_emoji(self.guild_id, self.id, payload=payload, reason=reason) - return Emoji(guild=self.guild, data=data, state=self._state) # type: ignore # if guild is None, the http request would have failed diff --git a/.venv/Lib/site-packages/discord/enums.py b/.venv/Lib/site-packages/discord/enums.py deleted file mode 100644 index 81d5cb4..0000000 --- a/.venv/Lib/site-packages/discord/enums.py +++ /dev/null @@ -1,776 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" -from __future__ import annotations - -import types -from collections import namedtuple -from typing import Any, ClassVar, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, TypeVar, Iterator, Mapping - -__all__ = ( - 'Enum', - 'ChannelType', - 'MessageType', - 'SpeakingState', - 'VerificationLevel', - 'ContentFilter', - 'Status', - 'DefaultAvatar', - 'AuditLogAction', - 'AuditLogActionCategory', - 'UserFlags', - 'ActivityType', - 'NotificationLevel', - 'TeamMembershipState', - 'WebhookType', - 'ExpireBehaviour', - 'ExpireBehavior', - 'StickerType', - 'StickerFormatType', - 'InviteTarget', - 'VideoQualityMode', - 'ComponentType', - 'ButtonStyle', - 'TextStyle', - 'PrivacyLevel', - 'InteractionType', - 'InteractionResponseType', - 'NSFWLevel', - 'MFALevel', - 'Locale', - 'EntityType', - 'EventStatus', - 'AppCommandType', - 'AppCommandOptionType', - 'AppCommandPermissionType', - 'AutoModRuleTriggerType', - 'AutoModRuleEventType', - 'AutoModRuleActionType', - 'ForumLayoutType', - 'ForumOrderType', -) - -if TYPE_CHECKING: - from typing_extensions import Self - - -def _create_value_cls(name: str, comparable: bool): - # All the type ignores here are due to the type checker being unable to recognise - # Runtime type creation without exploding. - cls = namedtuple('_EnumValue_' + name, 'name value') - cls.__repr__ = lambda self: f'<{name}.{self.name}: {self.value!r}>' # type: ignore - cls.__str__ = lambda self: f'{name}.{self.name}' # type: ignore - if comparable: - cls.__le__ = lambda self, other: isinstance(other, self.__class__) and self.value <= other.value # type: ignore - cls.__ge__ = lambda self, other: isinstance(other, self.__class__) and self.value >= other.value # type: ignore - cls.__lt__ = lambda self, other: isinstance(other, self.__class__) and self.value < other.value # type: ignore - cls.__gt__ = lambda self, other: isinstance(other, self.__class__) and self.value > other.value # type: ignore - return cls - - -def _is_descriptor(obj): - return hasattr(obj, '__get__') or hasattr(obj, '__set__') or hasattr(obj, '__delete__') - - -class EnumMeta(type): - if TYPE_CHECKING: - __name__: ClassVar[str] - _enum_member_names_: ClassVar[List[str]] - _enum_member_map_: ClassVar[Dict[str, Any]] - _enum_value_map_: ClassVar[Dict[Any, Any]] - - def __new__(cls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any], *, comparable: bool = False) -> Self: - value_mapping = {} - member_mapping = {} - member_names = [] - - value_cls = _create_value_cls(name, comparable) - for key, value in list(attrs.items()): - is_descriptor = _is_descriptor(value) - if key[0] == '_' and not is_descriptor: - continue - - # Special case classmethod to just pass through - if isinstance(value, classmethod): - continue - - if is_descriptor: - setattr(value_cls, key, value) - del attrs[key] - continue - - try: - new_value = value_mapping[value] - except KeyError: - new_value = value_cls(name=key, value=value) - value_mapping[value] = new_value - member_names.append(key) - - member_mapping[key] = new_value - attrs[key] = new_value - - attrs['_enum_value_map_'] = value_mapping - attrs['_enum_member_map_'] = member_mapping - attrs['_enum_member_names_'] = member_names - attrs['_enum_value_cls_'] = value_cls - actual_cls = super().__new__(cls, name, bases, attrs) - value_cls._actual_enum_cls_ = actual_cls # type: ignore # Runtime attribute isn't understood - return actual_cls - - def __iter__(cls) -> Iterator[Any]: - return (cls._enum_member_map_[name] for name in cls._enum_member_names_) - - def __reversed__(cls) -> 
Iterator[Any]: - return (cls._enum_member_map_[name] for name in reversed(cls._enum_member_names_)) - - def __len__(cls) -> int: - return len(cls._enum_member_names_) - - def __repr__(cls) -> str: - return f'' - - @property - def __members__(cls) -> Mapping[str, Any]: - return types.MappingProxyType(cls._enum_member_map_) - - def __call__(cls, value: str) -> Any: - try: - return cls._enum_value_map_[value] - except (KeyError, TypeError): - raise ValueError(f"{value!r} is not a valid {cls.__name__}") - - def __getitem__(cls, key: str) -> Any: - return cls._enum_member_map_[key] - - def __setattr__(cls, name: str, value: Any) -> None: - raise TypeError('Enums are immutable.') - - def __delattr__(cls, attr: str) -> None: - raise TypeError('Enums are immutable') - - def __instancecheck__(self, instance: Any) -> bool: - # isinstance(x, Y) - # -> __instancecheck__(Y, x) - try: - return instance._actual_enum_cls_ is self - except AttributeError: - return False - - -if TYPE_CHECKING: - from enum import Enum -else: - - class Enum(metaclass=EnumMeta): - @classmethod - def try_value(cls, value): - try: - return cls._enum_value_map_[value] - except (KeyError, TypeError): - return value - - -class ChannelType(Enum): - text = 0 - private = 1 - voice = 2 - group = 3 - category = 4 - news = 5 - news_thread = 10 - public_thread = 11 - private_thread = 12 - stage_voice = 13 - forum = 15 - - def __str__(self) -> str: - return self.name - - -class MessageType(Enum): - default = 0 - recipient_add = 1 - recipient_remove = 2 - call = 3 - channel_name_change = 4 - channel_icon_change = 5 - pins_add = 6 - new_member = 7 - premium_guild_subscription = 8 - premium_guild_tier_1 = 9 - premium_guild_tier_2 = 10 - premium_guild_tier_3 = 11 - channel_follow_add = 12 - guild_stream = 13 - guild_discovery_disqualified = 14 - guild_discovery_requalified = 15 - guild_discovery_grace_period_initial_warning = 16 - guild_discovery_grace_period_final_warning = 17 - thread_created = 18 - reply = 19 - chat_input_command = 20 - thread_starter_message = 21 - guild_invite_reminder = 22 - context_menu_command = 23 - auto_moderation_action = 24 - role_subscription_purchase = 25 - interaction_premium_upsell = 26 - stage_start = 27 - stage_end = 28 - stage_speaker = 29 - stage_raise_hand = 30 - stage_topic = 31 - guild_application_premium_subscription = 32 - - -class SpeakingState(Enum): - none = 0 - voice = 1 - soundshare = 2 - priority = 4 - - def __str__(self) -> str: - return self.name - - def __int__(self) -> int: - return self.value - - -class VerificationLevel(Enum, comparable=True): - none = 0 - low = 1 - medium = 2 - high = 3 - highest = 4 - - def __str__(self) -> str: - return self.name - - -class ContentFilter(Enum, comparable=True): - disabled = 0 - no_role = 1 - all_members = 2 - - def __str__(self) -> str: - return self.name - - -class Status(Enum): - online = 'online' - offline = 'offline' - idle = 'idle' - dnd = 'dnd' - do_not_disturb = 'dnd' - invisible = 'invisible' - - def __str__(self) -> str: - return self.value - - -class DefaultAvatar(Enum): - blurple = 0 - grey = 1 - gray = 1 - green = 2 - orange = 3 - red = 4 - pink = 5 - - def __str__(self) -> str: - return self.name - - -class NotificationLevel(Enum, comparable=True): - all_messages = 0 - only_mentions = 1 - - -class AuditLogActionCategory(Enum): - create = 1 - delete = 2 - update = 3 - - -class AuditLogAction(Enum): - # fmt: off - guild_update = 1 - channel_create = 10 - channel_update = 11 - channel_delete = 12 - overwrite_create = 13 - overwrite_update = 14 
- overwrite_delete = 15 - kick = 20 - member_prune = 21 - ban = 22 - unban = 23 - member_update = 24 - member_role_update = 25 - member_move = 26 - member_disconnect = 27 - bot_add = 28 - role_create = 30 - role_update = 31 - role_delete = 32 - invite_create = 40 - invite_update = 41 - invite_delete = 42 - webhook_create = 50 - webhook_update = 51 - webhook_delete = 52 - emoji_create = 60 - emoji_update = 61 - emoji_delete = 62 - message_delete = 72 - message_bulk_delete = 73 - message_pin = 74 - message_unpin = 75 - integration_create = 80 - integration_update = 81 - integration_delete = 82 - stage_instance_create = 83 - stage_instance_update = 84 - stage_instance_delete = 85 - sticker_create = 90 - sticker_update = 91 - sticker_delete = 92 - scheduled_event_create = 100 - scheduled_event_update = 101 - scheduled_event_delete = 102 - thread_create = 110 - thread_update = 111 - thread_delete = 112 - app_command_permission_update = 121 - automod_rule_create = 140 - automod_rule_update = 141 - automod_rule_delete = 142 - automod_block_message = 143 - automod_flag_message = 144 - automod_timeout_member = 145 - # fmt: on - - @property - def category(self) -> Optional[AuditLogActionCategory]: - # fmt: off - lookup: Dict[AuditLogAction, Optional[AuditLogActionCategory]] = { - AuditLogAction.guild_update: AuditLogActionCategory.update, - AuditLogAction.channel_create: AuditLogActionCategory.create, - AuditLogAction.channel_update: AuditLogActionCategory.update, - AuditLogAction.channel_delete: AuditLogActionCategory.delete, - AuditLogAction.overwrite_create: AuditLogActionCategory.create, - AuditLogAction.overwrite_update: AuditLogActionCategory.update, - AuditLogAction.overwrite_delete: AuditLogActionCategory.delete, - AuditLogAction.kick: None, - AuditLogAction.member_prune: None, - AuditLogAction.ban: None, - AuditLogAction.unban: None, - AuditLogAction.member_update: AuditLogActionCategory.update, - AuditLogAction.member_role_update: AuditLogActionCategory.update, - AuditLogAction.member_move: None, - AuditLogAction.member_disconnect: None, - AuditLogAction.bot_add: None, - AuditLogAction.role_create: AuditLogActionCategory.create, - AuditLogAction.role_update: AuditLogActionCategory.update, - AuditLogAction.role_delete: AuditLogActionCategory.delete, - AuditLogAction.invite_create: AuditLogActionCategory.create, - AuditLogAction.invite_update: AuditLogActionCategory.update, - AuditLogAction.invite_delete: AuditLogActionCategory.delete, - AuditLogAction.webhook_create: AuditLogActionCategory.create, - AuditLogAction.webhook_update: AuditLogActionCategory.update, - AuditLogAction.webhook_delete: AuditLogActionCategory.delete, - AuditLogAction.emoji_create: AuditLogActionCategory.create, - AuditLogAction.emoji_update: AuditLogActionCategory.update, - AuditLogAction.emoji_delete: AuditLogActionCategory.delete, - AuditLogAction.message_delete: AuditLogActionCategory.delete, - AuditLogAction.message_bulk_delete: AuditLogActionCategory.delete, - AuditLogAction.message_pin: None, - AuditLogAction.message_unpin: None, - AuditLogAction.integration_create: AuditLogActionCategory.create, - AuditLogAction.integration_update: AuditLogActionCategory.update, - AuditLogAction.integration_delete: AuditLogActionCategory.delete, - AuditLogAction.stage_instance_create: AuditLogActionCategory.create, - AuditLogAction.stage_instance_update: AuditLogActionCategory.update, - AuditLogAction.stage_instance_delete: AuditLogActionCategory.delete, - AuditLogAction.sticker_create: AuditLogActionCategory.create, - 
AuditLogAction.sticker_update: AuditLogActionCategory.update, - AuditLogAction.sticker_delete: AuditLogActionCategory.delete, - AuditLogAction.scheduled_event_create: AuditLogActionCategory.create, - AuditLogAction.scheduled_event_update: AuditLogActionCategory.update, - AuditLogAction.scheduled_event_delete: AuditLogActionCategory.delete, - AuditLogAction.thread_create: AuditLogActionCategory.create, - AuditLogAction.thread_delete: AuditLogActionCategory.delete, - AuditLogAction.thread_update: AuditLogActionCategory.update, - AuditLogAction.app_command_permission_update: AuditLogActionCategory.update, - AuditLogAction.automod_rule_create: AuditLogActionCategory.create, - AuditLogAction.automod_rule_update: AuditLogActionCategory.update, - AuditLogAction.automod_rule_delete: AuditLogActionCategory.delete, - AuditLogAction.automod_block_message: None, - AuditLogAction.automod_flag_message: None, - AuditLogAction.automod_timeout_member: None, - } - # fmt: on - return lookup[self] - - @property - def target_type(self) -> Optional[str]: - v = self.value - if v == -1: - return 'all' - elif v < 10: - return 'guild' - elif v < 20: - return 'channel' - elif v < 30: - return 'user' - elif v < 40: - return 'role' - elif v < 50: - return 'invite' - elif v < 60: - return 'webhook' - elif v < 70: - return 'emoji' - elif v == 73: - return 'channel' - elif v < 80: - return 'message' - elif v < 83: - return 'integration' - elif v < 90: - return 'stage_instance' - elif v < 93: - return 'sticker' - elif v < 103: - return 'guild_scheduled_event' - elif v < 113: - return 'thread' - elif v < 122: - return 'integration_or_app_command' - elif v < 143: - return 'auto_moderation' - elif v < 146: - return 'user' - - -class UserFlags(Enum): - staff = 1 - partner = 2 - hypesquad = 4 - bug_hunter = 8 - mfa_sms = 16 - premium_promo_dismissed = 32 - hypesquad_bravery = 64 - hypesquad_brilliance = 128 - hypesquad_balance = 256 - early_supporter = 512 - team_user = 1024 - system = 4096 - has_unread_urgent_messages = 8192 - bug_hunter_level_2 = 16384 - verified_bot = 65536 - verified_bot_developer = 131072 - discord_certified_moderator = 262144 - bot_http_interactions = 524288 - spammer = 1048576 - active_developer = 4194304 - - -class ActivityType(Enum): - unknown = -1 - playing = 0 - streaming = 1 - listening = 2 - watching = 3 - custom = 4 - competing = 5 - - def __int__(self) -> int: - return self.value - - -class TeamMembershipState(Enum): - invited = 1 - accepted = 2 - - -class WebhookType(Enum): - incoming = 1 - channel_follower = 2 - application = 3 - - -class ExpireBehaviour(Enum): - remove_role = 0 - kick = 1 - - -ExpireBehavior = ExpireBehaviour - - -class StickerType(Enum): - standard = 1 - guild = 2 - - -class StickerFormatType(Enum): - png = 1 - apng = 2 - lottie = 3 - gif = 4 - - @property - def file_extension(self) -> str: - # fmt: off - lookup: Dict[StickerFormatType, str] = { - StickerFormatType.png: 'png', - StickerFormatType.apng: 'png', - StickerFormatType.lottie: 'json', - StickerFormatType.gif: 'gif', - } - # fmt: on - return lookup.get(self, 'png') - - -class InviteTarget(Enum): - unknown = 0 - stream = 1 - embedded_application = 2 - - -class InteractionType(Enum): - ping = 1 - application_command = 2 - component = 3 - autocomplete = 4 - modal_submit = 5 - - -class InteractionResponseType(Enum): - pong = 1 - # ack = 2 (deprecated) - # channel_message = 3 (deprecated) - channel_message = 4 # (with source) - deferred_channel_message = 5 # (with source) - deferred_message_update = 6 # for components - 
message_update = 7 # for components - autocomplete_result = 8 - modal = 9 # for modals - - -class VideoQualityMode(Enum): - auto = 1 - full = 2 - - def __int__(self) -> int: - return self.value - - -class ComponentType(Enum): - action_row = 1 - button = 2 - select = 3 - string_select = 3 - text_input = 4 - user_select = 5 - role_select = 6 - mentionable_select = 7 - channel_select = 8 - - def __int__(self) -> int: - return self.value - - -class ButtonStyle(Enum): - primary = 1 - secondary = 2 - success = 3 - danger = 4 - link = 5 - - # Aliases - blurple = 1 - grey = 2 - gray = 2 - green = 3 - red = 4 - url = 5 - - def __int__(self) -> int: - return self.value - - -class TextStyle(Enum): - short = 1 - paragraph = 2 - - # Aliases - long = 2 - - def __int__(self) -> int: - return self.value - - -class PrivacyLevel(Enum): - guild_only = 2 - - -class NSFWLevel(Enum, comparable=True): - default = 0 - explicit = 1 - safe = 2 - age_restricted = 3 - - -class MFALevel(Enum, comparable=True): - disabled = 0 - require_2fa = 1 - - -class Locale(Enum): - american_english = 'en-US' - british_english = 'en-GB' - bulgarian = 'bg' - chinese = 'zh-CN' - taiwan_chinese = 'zh-TW' - croatian = 'hr' - czech = 'cs' - indonesian = 'id' - danish = 'da' - dutch = 'nl' - finnish = 'fi' - french = 'fr' - german = 'de' - greek = 'el' - hindi = 'hi' - hungarian = 'hu' - italian = 'it' - japanese = 'ja' - korean = 'ko' - lithuanian = 'lt' - norwegian = 'no' - polish = 'pl' - brazil_portuguese = 'pt-BR' - romanian = 'ro' - russian = 'ru' - spain_spanish = 'es-ES' - swedish = 'sv-SE' - thai = 'th' - turkish = 'tr' - ukrainian = 'uk' - vietnamese = 'vi' - - def __str__(self) -> str: - return self.value - - -E = TypeVar('E', bound='Enum') - - -class EntityType(Enum): - stage_instance = 1 - voice = 2 - external = 3 - - -class EventStatus(Enum): - scheduled = 1 - active = 2 - completed = 3 - canceled = 4 - - ended = 3 - cancelled = 4 - - -class AppCommandOptionType(Enum): - subcommand = 1 - subcommand_group = 2 - string = 3 - integer = 4 - boolean = 5 - user = 6 - channel = 7 - role = 8 - mentionable = 9 - number = 10 - attachment = 11 - - -class AppCommandType(Enum): - chat_input = 1 - user = 2 - message = 3 - - -class AppCommandPermissionType(Enum): - role = 1 - user = 2 - channel = 3 - - -class AutoModRuleTriggerType(Enum): - keyword = 1 - harmful_link = 2 - spam = 3 - keyword_preset = 4 - mention_spam = 5 - - -class AutoModRuleEventType(Enum): - message_send = 1 - - -class AutoModRuleActionType(Enum): - block_message = 1 - send_alert_message = 2 - timeout = 3 - - -class ForumLayoutType(Enum): - not_set = 0 - list_view = 1 - gallery_view = 2 - - -class ForumOrderType(Enum): - latest_activity = 0 - creation_date = 1 - - -def create_unknown_value(cls: Type[E], val: Any) -> E: - value_cls = cls._enum_value_cls_ # type: ignore # This is narrowed below - name = f'unknown_{val}' - return value_cls(name=name, value=val) - - -def try_enum(cls: Type[E], val: Any) -> E: - """A function that tries to turn the value into enum ``cls``. - - If it fails it returns a proxy invalid value instead. 
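The ``try_enum`` helper documented above converts raw payload values into members of this custom ``Enum`` and falls back to a proxy member when the value is unknown. A minimal sketch of that behaviour, assuming the discord.py 2.x API being removed in this hunk (the concrete values are illustrative):

.. code-block:: python3

    from discord.enums import ChannelType, VerificationLevel, try_enum

    # Known raw values resolve to the matching member.
    assert try_enum(ChannelType, 2) == ChannelType.voice
    assert str(ChannelType.voice) == 'voice'     # __str__ returns the name

    # Unknown values produce a proxy member instead of raising.
    unknown = try_enum(ChannelType, 999)
    print(unknown.name, unknown.value)           # unknown_999 999

    # Enums declared with comparable=True support ordering comparisons.
    assert VerificationLevel.high > VerificationLevel.low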
- """ - - try: - return cls._enum_value_map_[val] # type: ignore # All errors are caught below - except (KeyError, TypeError, AttributeError): - return create_unknown_value(cls, val) diff --git a/.venv/Lib/site-packages/discord/errors.py b/.venv/Lib/site-packages/discord/errors.py deleted file mode 100644 index 6035ace..0000000 --- a/.venv/Lib/site-packages/discord/errors.py +++ /dev/null @@ -1,280 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations -from typing import Dict, List, Optional, TYPE_CHECKING, Any, Tuple, Union - -if TYPE_CHECKING: - from aiohttp import ClientResponse, ClientWebSocketResponse - from requests import Response - - _ResponseType = Union[ClientResponse, Response] - - from .interactions import Interaction - -__all__ = ( - 'DiscordException', - 'ClientException', - 'GatewayNotFound', - 'HTTPException', - 'RateLimited', - 'Forbidden', - 'NotFound', - 'DiscordServerError', - 'InvalidData', - 'LoginFailure', - 'ConnectionClosed', - 'PrivilegedIntentsRequired', - 'InteractionResponded', -) - - -class DiscordException(Exception): - """Base exception class for discord.py - - Ideally speaking, this could be caught to handle any exceptions raised from this library. - """ - - pass - - -class ClientException(DiscordException): - """Exception that's raised when an operation in the :class:`Client` fails. - - These are usually for exceptions that happened due to user input. - """ - - pass - - -class GatewayNotFound(DiscordException): - """An exception that is raised when the gateway for Discord could not be found""" - - def __init__(self): - message = 'The gateway to connect to discord was not found.' - super().__init__(message) - - -def _flatten_error_dict(d: Dict[str, Any], key: str = '') -> Dict[str, str]: - items: List[Tuple[str, str]] = [] - for k, v in d.items(): - new_key = key + '.' + k if key else k - - if isinstance(v, dict): - try: - _errors: List[Dict[str, Any]] = v['_errors'] - except KeyError: - items.extend(_flatten_error_dict(v, new_key).items()) - else: - items.append((new_key, ' '.join(x.get('message', '') for x in _errors))) - else: - items.append((new_key, v)) - - return dict(items) - - -class HTTPException(DiscordException): - """Exception that's raised when an HTTP request operation fails. - - Attributes - ------------ - response: :class:`aiohttp.ClientResponse` - The response of the failed HTTP request. This is an - instance of :class:`aiohttp.ClientResponse`. 
In some cases - this could also be a :class:`requests.Response`. - - text: :class:`str` - The text of the error. Could be an empty string. - status: :class:`int` - The status code of the HTTP request. - code: :class:`int` - The Discord specific error code for the failure. - """ - - def __init__(self, response: _ResponseType, message: Optional[Union[str, Dict[str, Any]]]): - self.response: _ResponseType = response - self.status: int = response.status # type: ignore # This attribute is filled by the library even if using requests - self.code: int - self.text: str - if isinstance(message, dict): - self.code = message.get('code', 0) - base = message.get('message', '') - errors = message.get('errors') - self._errors: Optional[Dict[str, Any]] = errors - if errors: - errors = _flatten_error_dict(errors) - helpful = '\n'.join('In %s: %s' % t for t in errors.items()) - self.text = base + '\n' + helpful - else: - self.text = base - else: - self.text = message or '' - self.code = 0 - - fmt = '{0.status} {0.reason} (error code: {1})' - if len(self.text): - fmt += ': {2}' - - super().__init__(fmt.format(self.response, self.code, self.text)) - - -class RateLimited(DiscordException): - """Exception that's raised for when status code 429 occurs - and the timeout is greater than the configured maximum using - the ``max_ratelimit_timeout`` parameter in :class:`Client`. - - This is not raised during global ratelimits. - - Since sometimes requests are halted pre-emptively before they're - even made, this **does not** subclass :exc:`HTTPException`. - - .. versionadded:: 2.0 - - Attributes - ------------ - retry_after: :class:`float` - The amount of seconds that the client should wait before retrying - the request. - """ - - def __init__(self, retry_after: float): - self.retry_after = retry_after - super().__init__(f'Too many requests. Retry in {retry_after:.2f} seconds.') - - -class Forbidden(HTTPException): - """Exception that's raised for when status code 403 occurs. - - Subclass of :exc:`HTTPException` - """ - - pass - - -class NotFound(HTTPException): - """Exception that's raised for when status code 404 occurs. - - Subclass of :exc:`HTTPException` - """ - - pass - - -class DiscordServerError(HTTPException): - """Exception that's raised for when a 500 range status code occurs. - - Subclass of :exc:`HTTPException`. - - .. versionadded:: 1.5 - """ - - pass - - -class InvalidData(ClientException): - """Exception that's raised when the library encounters unknown - or invalid data from Discord. - """ - - pass - - -class LoginFailure(ClientException): - """Exception that's raised when the :meth:`Client.login` function - fails to log you in from improper credentials or some other misc. - failure. - """ - - pass - - -class ConnectionClosed(ClientException): - """Exception that's raised when the gateway connection is - closed for reasons that could not be handled internally. - - Attributes - ----------- - code: :class:`int` - The close code of the websocket. - reason: :class:`str` - The reason provided for the closure. - shard_id: Optional[:class:`int`] - The shard ID that got closed if applicable. 
- """ - - def __init__(self, socket: ClientWebSocketResponse, *, shard_id: Optional[int], code: Optional[int] = None): - # This exception is just the same exception except - # reconfigured to subclass ClientException for users - self.code: int = code or socket.close_code or -1 - # aiohttp doesn't seem to consistently provide close reason - self.reason: str = '' - self.shard_id: Optional[int] = shard_id - super().__init__(f'Shard ID {self.shard_id} WebSocket closed with {self.code}') - - -class PrivilegedIntentsRequired(ClientException): - """Exception that's raised when the gateway is requesting privileged intents - but they're not ticked in the developer page yet. - - Go to https://discord.com/developers/applications/ and enable the intents - that are required. Currently these are as follows: - - - :attr:`Intents.members` - - :attr:`Intents.presences` - - :attr:`Intents.message_content` - - Attributes - ----------- - shard_id: Optional[:class:`int`] - The shard ID that got closed if applicable. - """ - - def __init__(self, shard_id: Optional[int]): - self.shard_id: Optional[int] = shard_id - msg = ( - 'Shard ID %s is requesting privileged intents that have not been explicitly enabled in the ' - 'developer portal. It is recommended to go to https://discord.com/developers/applications/ ' - 'and explicitly enable the privileged intents within your application\'s page. If this is not ' - 'possible, then consider disabling the privileged intents instead.' - ) - super().__init__(msg % shard_id) - - -class InteractionResponded(ClientException): - """Exception that's raised when sending another interaction response using - :class:`InteractionResponse` when one has already been done before. - - An interaction can only respond once. - - .. versionadded:: 2.0 - - Attributes - ----------- - interaction: :class:`Interaction` - The interaction that's already been responded to. - """ - - def __init__(self, interaction: Interaction): - self.interaction: Interaction = interaction - super().__init__('This interaction has already been responded to before') diff --git a/.venv/Lib/site-packages/discord/ext/commands/__init__.py b/.venv/Lib/site-packages/discord/ext/commands/__init__.py deleted file mode 100644 index 08dab54..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -discord.ext.commands -~~~~~~~~~~~~~~~~~~~~~ - -An extension module to facilitate creation of bot commands. - -:copyright: (c) 2015-present Rapptz -:license: MIT, see LICENSE for more details. 
-""" - -from .bot import * -from .cog import * -from .context import * -from .converter import * -from .cooldowns import * -from .core import * -from .errors import * -from .flags import * -from .help import * -from .parameters import * -from .hybrid import * diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/__init__.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 1f1b64a..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/_types.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/_types.cpython-311.pyc deleted file mode 100644 index 8f0100f..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/_types.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/bot.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/bot.cpython-311.pyc deleted file mode 100644 index d9b6873..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/bot.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/cog.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/cog.cpython-311.pyc deleted file mode 100644 index dfc1d53..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/cog.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/context.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/context.cpython-311.pyc deleted file mode 100644 index a66ffa4..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/context.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/converter.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/converter.cpython-311.pyc deleted file mode 100644 index 5528f3a..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/converter.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/cooldowns.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/cooldowns.cpython-311.pyc deleted file mode 100644 index bd56684..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/cooldowns.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/core.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/core.cpython-311.pyc deleted file mode 100644 index 13a6da8..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/core.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/errors.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/errors.cpython-311.pyc deleted file mode 100644 index eb2edb6..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/errors.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/flags.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/flags.cpython-311.pyc deleted file mode 100644 index 180a09b..0000000 Binary files 
a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/flags.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/help.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/help.cpython-311.pyc deleted file mode 100644 index 527896e..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/help.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/hybrid.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/hybrid.cpython-311.pyc deleted file mode 100644 index 18191e9..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/hybrid.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/parameters.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/parameters.cpython-311.pyc deleted file mode 100644 index f7052db..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/parameters.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/view.cpython-311.pyc b/.venv/Lib/site-packages/discord/ext/commands/__pycache__/view.cpython-311.pyc deleted file mode 100644 index 71d295f..0000000 Binary files a/.venv/Lib/site-packages/discord/ext/commands/__pycache__/view.cpython-311.pyc and /dev/null differ diff --git a/.venv/Lib/site-packages/discord/ext/commands/_types.py b/.venv/Lib/site-packages/discord/ext/commands/_types.py deleted file mode 100644 index 1331c9f..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/_types.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - - -from typing import Any, Awaitable, Callable, Coroutine, TYPE_CHECKING, Protocol, TypeVar, Union, Tuple, Optional - - -T = TypeVar('T') - -if TYPE_CHECKING: - from typing_extensions import ParamSpec - - from .bot import Bot, AutoShardedBot - from .context import Context - from .cog import Cog - from .errors import CommandError - - P = ParamSpec('P') - MaybeAwaitableFunc = Callable[P, 'MaybeAwaitable[T]'] -else: - P = TypeVar('P') - MaybeAwaitableFunc = Tuple[P, T] - -_Bot = Union['Bot', 'AutoShardedBot'] -Coro = Coroutine[Any, Any, T] -CoroFunc = Callable[..., Coro[Any]] -MaybeCoro = Union[T, Coro[T]] -MaybeAwaitable = Union[T, Awaitable[T]] - -CogT = TypeVar('CogT', bound='Optional[Cog]') -UserCheck = Callable[["ContextT"], MaybeCoro[bool]] -Hook = Union[Callable[["CogT", "ContextT"], Coro[Any]], Callable[["ContextT"], Coro[Any]]] -Error = Union[Callable[["CogT", "ContextT", "CommandError"], Coro[Any]], Callable[["ContextT", "CommandError"], Coro[Any]]] - -ContextT = TypeVar('ContextT', bound='Context[Any]') -BotT = TypeVar('BotT', bound=_Bot, covariant=True) - -ContextT_co = TypeVar('ContextT_co', bound='Context[Any]', covariant=True) - - -class Check(Protocol[ContextT_co]): # type: ignore # TypeVar is expected to be invariant - - predicate: Callable[[ContextT_co], Coroutine[Any, Any, bool]] - - def __call__(self, coro_or_commands: T) -> T: - ... - - -# This is merely a tag type to avoid circular import issues. -# Yes, this is a terrible solution but ultimately it is the only solution. -class _BaseCommand: - __slots__ = () diff --git a/.venv/Lib/site-packages/discord/ext/commands/bot.py b/.venv/Lib/site-packages/discord/ext/commands/bot.py deleted file mode 100644 index 363b665..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/bot.py +++ /dev/null @@ -1,1500 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - - -import asyncio -import collections -import collections.abc -import inspect -import importlib.util -import sys -import logging -import types -from typing import ( - Any, - Callable, - Mapping, - List, - Dict, - TYPE_CHECKING, - Optional, - Sequence, - TypeVar, - Type, - Union, - Iterable, - Collection, - overload, -) - -import discord -from discord import app_commands -from discord.app_commands.tree import _retrieve_guild_ids -from discord.utils import MISSING, _is_submodule - -from .core import GroupMixin -from .view import StringView -from .context import Context -from . 
import errors -from .help import HelpCommand, DefaultHelpCommand -from .cog import Cog -from .hybrid import hybrid_command, hybrid_group, HybridCommand, HybridGroup - -if TYPE_CHECKING: - from typing_extensions import Self - - import importlib.machinery - - from discord.message import Message - from discord.interactions import Interaction - from discord.abc import User, Snowflake - from ._types import ( - _Bot, - BotT, - UserCheck, - CoroFunc, - ContextT, - MaybeAwaitableFunc, - ) - from .core import Command - from .hybrid import CommandCallback, ContextT, P - - _Prefix = Union[Iterable[str], str] - _PrefixCallable = MaybeAwaitableFunc[[BotT, Message], _Prefix] - PrefixType = Union[_Prefix, _PrefixCallable[BotT]] - -__all__ = ( - 'when_mentioned', - 'when_mentioned_or', - 'Bot', - 'AutoShardedBot', -) - -T = TypeVar('T') -CFT = TypeVar('CFT', bound='CoroFunc') - -_log = logging.getLogger(__name__) - - -def when_mentioned(bot: _Bot, msg: Message, /) -> List[str]: - """A callable that implements a command prefix equivalent to being mentioned. - - These are meant to be passed into the :attr:`.Bot.command_prefix` attribute. - - .. versionchanged:: 2.0 - - ``bot`` and ``msg`` parameters are now positional-only. - """ - # bot.user will never be None when this is called - return [f'<@{bot.user.id}> ', f'<@!{bot.user.id}> '] # type: ignore - - -def when_mentioned_or(*prefixes: str) -> Callable[[_Bot, Message], List[str]]: - """A callable that implements when mentioned or other prefixes provided. - - These are meant to be passed into the :attr:`.Bot.command_prefix` attribute. - - Example - -------- - - .. code-block:: python3 - - bot = commands.Bot(command_prefix=commands.when_mentioned_or('!')) - - - .. note:: - - This callable returns another callable, so if this is done inside a custom - callable, you must call the returned callable, for example: - - .. 
code-block:: python3 - - async def get_prefix(bot, message): - extras = await prefixes_for(message.guild) # returns a list - return commands.when_mentioned_or(*extras)(bot, message) - - - See Also - ---------- - :func:`.when_mentioned` - """ - - def inner(bot, msg): - r = list(prefixes) - r = when_mentioned(bot, msg) + r - return r - - return inner - - -class _DefaultRepr: - def __repr__(self): - return '' - - -_default: Any = _DefaultRepr() - - -class BotBase(GroupMixin[None]): - def __init__( - self, - command_prefix: PrefixType[BotT], - *, - help_command: Optional[HelpCommand] = _default, - tree_cls: Type[app_commands.CommandTree[Any]] = app_commands.CommandTree, - description: Optional[str] = None, - intents: discord.Intents, - **options: Any, - ) -> None: - super().__init__(intents=intents, **options) - self.command_prefix: PrefixType[BotT] = command_prefix - self.extra_events: Dict[str, List[CoroFunc]] = {} - # Self doesn't have the ClientT bound, but since this is a mixin it technically does - self.__tree: app_commands.CommandTree[Self] = tree_cls(self) # type: ignore - self.__cogs: Dict[str, Cog] = {} - self.__extensions: Dict[str, types.ModuleType] = {} - self._checks: List[UserCheck] = [] - self._check_once: List[UserCheck] = [] - self._before_invoke: Optional[CoroFunc] = None - self._after_invoke: Optional[CoroFunc] = None - self._help_command: Optional[HelpCommand] = None - self.description: str = inspect.cleandoc(description) if description else '' - self.owner_id: Optional[int] = options.get('owner_id') - self.owner_ids: Optional[Collection[int]] = options.get('owner_ids', set()) - self.strip_after_prefix: bool = options.get('strip_after_prefix', False) - - if self.owner_id and self.owner_ids: - raise TypeError('Both owner_id and owner_ids are set.') - - if self.owner_ids and not isinstance(self.owner_ids, collections.abc.Collection): - raise TypeError(f'owner_ids must be a collection not {self.owner_ids.__class__.__name__}') - - if help_command is _default: - self.help_command = DefaultHelpCommand() - else: - self.help_command = help_command - - # internal helpers - - async def _async_setup_hook(self) -> None: - # self/super() resolves to Client/AutoShardedClient - await super()._async_setup_hook() # type: ignore - prefix = self.command_prefix - - # This has to be here because for the default logging set up to capture - # the logging calls, they have to come after the `Client.run` call. 
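Putting the prefix helpers and the ``BotBase`` constructor together, a typical bot is built by passing a prefix (or a prefix callable such as ``when_mentioned_or``) plus explicit intents; the prefix string and description below are illustrative:

.. code-block:: python3

    import discord
    from discord.ext import commands

    # message_content is the privileged intent that the startup warning
    # checks for when a textual prefix is configured.
    intents = discord.Intents.default()
    intents.message_content = True

    bot = commands.Bot(
        command_prefix=commands.when_mentioned_or('!'),
        intents=intents,
        description='Example bot (illustrative).',
    )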
- # The best place to do this is in an async init scenario - if not self.intents.message_content: # type: ignore - trigger_warning = ( - (callable(prefix) and prefix is not when_mentioned) - or isinstance(prefix, str) - or (isinstance(prefix, collections.abc.Iterable) and len(list(prefix)) >= 1) - ) - if trigger_warning: - _log.warning('Privileged message content intent is missing, commands may not work as expected.') - - def dispatch(self, event_name: str, /, *args: Any, **kwargs: Any) -> None: - # super() will resolve to Client - super().dispatch(event_name, *args, **kwargs) # type: ignore - ev = 'on_' + event_name - for event in self.extra_events.get(ev, []): - self._schedule_event(event, ev, *args, **kwargs) # type: ignore - - @discord.utils.copy_doc(discord.Client.close) - async def close(self) -> None: - for extension in tuple(self.__extensions): - try: - await self.unload_extension(extension) - except Exception: - pass - - for cog in tuple(self.__cogs): - try: - await self.remove_cog(cog) - except Exception: - pass - - await super().close() # type: ignore - - # GroupMixin overrides - - @discord.utils.copy_doc(GroupMixin.add_command) - def add_command(self, command: Command[Any, ..., Any], /) -> None: - super().add_command(command) - if isinstance(command, (HybridCommand, HybridGroup)) and command.app_command: - # If a cog is also inheriting from app_commands.Group then it'll also - # add the hybrid commands as text commands, which would recursively add the - # hybrid commands as slash commands. This check just terminates that recursion - # from happening - if command.cog is None or not command.cog.__cog_is_app_commands_group__: - self.tree.add_command(command.app_command) - - @discord.utils.copy_doc(GroupMixin.remove_command) - def remove_command(self, name: str, /) -> Optional[Command[Any, ..., Any]]: - cmd: Optional[Command[Any, ..., Any]] = super().remove_command(name) - if isinstance(cmd, (HybridCommand, HybridGroup)) and cmd.app_command: - # See above - if cmd.cog is not None and cmd.cog.__cog_is_app_commands_group__: - return cmd - - guild_ids: Optional[List[int]] = cmd.app_command._guild_ids - if guild_ids is None: - self.__tree.remove_command(name) - else: - for guild_id in guild_ids: - self.__tree.remove_command(name, guild=discord.Object(id=guild_id)) - - return cmd - - def hybrid_command( - self, - name: Union[str, app_commands.locale_str] = MISSING, - with_app_command: bool = True, - *args: Any, - **kwargs: Any, - ) -> Callable[[CommandCallback[Any, ContextT, P, T]], HybridCommand[Any, P, T]]: - """A shortcut decorator that invokes :func:`~discord.ext.commands.hybrid_command` and adds it to - the internal command list via :meth:`add_command`. - - Returns - -------- - Callable[..., :class:`HybridCommand`] - A decorator that converts the provided method into a Command, adds it to the bot, then returns it. - """ - - def decorator(func: CommandCallback[Any, ContextT, P, T]): - kwargs.setdefault('parent', self) - result = hybrid_command(name=name, *args, with_app_command=with_app_command, **kwargs)(func) - self.add_command(result) - return result - - return decorator - - def hybrid_group( - self, - name: Union[str, app_commands.locale_str] = MISSING, - with_app_command: bool = True, - *args: Any, - **kwargs: Any, - ) -> Callable[[CommandCallback[Any, ContextT, P, T]], HybridGroup[Any, P, T]]: - """A shortcut decorator that invokes :func:`~discord.ext.commands.hybrid_group` and adds it to - the internal command list via :meth:`add_command`. 
- - Returns - -------- - Callable[..., :class:`HybridGroup`] - A decorator that converts the provided method into a Group, adds it to the bot, then returns it. - """ - - def decorator(func: CommandCallback[Any, ContextT, P, T]): - kwargs.setdefault('parent', self) - result = hybrid_group(name=name, *args, with_app_command=with_app_command, **kwargs)(func) - self.add_command(result) - return result - - return decorator - - # Error handler - - async def on_command_error(self, context: Context[BotT], exception: errors.CommandError, /) -> None: - """|coro| - - The default command error handler provided by the bot. - - By default this logs to the library logger, however it could be - overridden to have a different implementation. - - This only fires if you do not specify any listeners for command error. - - .. versionchanged:: 2.0 - - ``context`` and ``exception`` parameters are now positional-only. - Instead of writing to ``sys.stderr`` this now uses the library logger. - """ - if self.extra_events.get('on_command_error', None): - return - - command = context.command - if command and command.has_error_handler(): - return - - cog = context.cog - if cog and cog.has_error_handler(): - return - - _log.error('Ignoring exception in command %s', command, exc_info=exception) - - # global check registration - - def check(self, func: T, /) -> T: - r"""A decorator that adds a global check to the bot. - - A global check is similar to a :func:`.check` that is applied - on a per command basis except it is run before any command checks - have been verified and applies to every command the bot has. - - .. note:: - - This function can either be a regular function or a coroutine. - - Similar to a command :func:`.check`\, this takes a single parameter - of type :class:`.Context` and can only raise exceptions inherited from - :exc:`.CommandError`. - - Example - --------- - - .. code-block:: python3 - - @bot.check - def check_commands(ctx): - return ctx.command.qualified_name in allowed_commands - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - """ - # T was used instead of Check to ensure the type matches on return - self.add_check(func) # type: ignore - return func - - def add_check(self, func: UserCheck[ContextT], /, *, call_once: bool = False) -> None: - """Adds a global check to the bot. - - This is the non-decorator interface to :meth:`.check` - and :meth:`.check_once`. - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - .. seealso:: The :func:`~discord.ext.commands.check` decorator - - Parameters - ----------- - func - The function that was used as a global check. - call_once: :class:`bool` - If the function should only be called once per - :meth:`.invoke` call. - """ - - if call_once: - self._check_once.append(func) - else: - self._checks.append(func) - - def remove_check(self, func: UserCheck[ContextT], /, *, call_once: bool = False) -> None: - """Removes a global check from the bot. - - This function is idempotent and will not raise an exception - if the function is not in the global checks. - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - Parameters - ----------- - func - The function to remove from the global checks. - call_once: :class:`bool` - If the function was added with ``call_once=True`` in - the :meth:`.Bot.add_check` call or using :meth:`.check_once`. 
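``add_check`` and ``remove_check`` are the non-decorator counterparts of the ``check`` decorator shown above; a brief sketch (the ``block_dms`` predicate and the prefix are illustrative):

.. code-block:: python3

    import discord
    from discord.ext import commands

    bot = commands.Bot(command_prefix='!', intents=discord.Intents.default())

    def block_dms(ctx):
        # Global check: only allow commands inside guilds.
        return ctx.guild is not None

    bot.add_check(block_dms)                  # runs before every command
    bot.add_check(block_dms, call_once=True)  # runs once per Bot.invoke call
    bot.remove_check(block_dms)               # idempotent removal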
- """ - l = self._check_once if call_once else self._checks - - try: - l.remove(func) - except ValueError: - pass - - def check_once(self, func: CFT, /) -> CFT: - r"""A decorator that adds a "call once" global check to the bot. - - Unlike regular global checks, this one is called only once - per :meth:`.invoke` call. - - Regular global checks are called whenever a command is called - or :meth:`.Command.can_run` is called. This type of check - bypasses that and ensures that it's called only once, even inside - the default help command. - - .. note:: - - When using this function the :class:`.Context` sent to a group subcommand - may only parse the parent command and not the subcommands due to it - being invoked once per :meth:`.Bot.invoke` call. - - .. note:: - - This function can either be a regular function or a coroutine. - - Similar to a command :func:`.check`\, this takes a single parameter - of type :class:`.Context` and can only raise exceptions inherited from - :exc:`.CommandError`. - - Example - --------- - - .. code-block:: python3 - - @bot.check_once - def whitelist(ctx): - return ctx.message.author.id in my_whitelist - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - """ - self.add_check(func, call_once=True) - return func - - async def can_run(self, ctx: Context[BotT], /, *, call_once: bool = False) -> bool: - data = self._check_once if call_once else self._checks - - if len(data) == 0: - return True - - return await discord.utils.async_all(f(ctx) for f in data) - - async def is_owner(self, user: User, /) -> bool: - """|coro| - - Checks if a :class:`~discord.User` or :class:`~discord.Member` is the owner of - this bot. - - If an :attr:`owner_id` is not set, it is fetched automatically - through the use of :meth:`~.Bot.application_info`. - - .. versionchanged:: 1.3 - The function also checks if the application is team-owned if - :attr:`owner_ids` is not set. - - .. versionchanged:: 2.0 - - ``user`` parameter is now positional-only. - - Parameters - ----------- - user: :class:`.abc.User` - The user to check for. - - Returns - -------- - :class:`bool` - Whether the user is the owner. - """ - - if self.owner_id: - return user.id == self.owner_id - elif self.owner_ids: - return user.id in self.owner_ids - else: - - app = await self.application_info() # type: ignore - if app.team: - self.owner_ids = ids = {m.id for m in app.team.members} - return user.id in ids - else: - self.owner_id = owner_id = app.owner.id - return user.id == owner_id - - def before_invoke(self, coro: CFT, /) -> CFT: - """A decorator that registers a coroutine as a pre-invoke hook. - - A pre-invoke hook is called directly before the command is - called. This makes it a useful function to set up database - connections or any type of set up required. - - This pre-invoke hook takes a sole parameter, a :class:`.Context`. - - .. note:: - - The :meth:`~.Bot.before_invoke` and :meth:`~.Bot.after_invoke` hooks are - only called if all checks and argument parsing procedures pass - without error. If any check or argument parsing procedures fail - then the hooks are not called. - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the pre-invoke hook. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. 
- """ - if not asyncio.iscoroutinefunction(coro): - raise TypeError('The pre-invoke hook must be a coroutine.') - - self._before_invoke = coro - return coro - - def after_invoke(self, coro: CFT, /) -> CFT: - r"""A decorator that registers a coroutine as a post-invoke hook. - - A post-invoke hook is called directly after the command is - called. This makes it a useful function to clean-up database - connections or any type of clean up required. - - This post-invoke hook takes a sole parameter, a :class:`.Context`. - - .. note:: - - Similar to :meth:`~.Bot.before_invoke`\, this is not called unless - checks and argument parsing procedures succeed. This hook is, - however, **always** called regardless of the internal command - callback raising an error (i.e. :exc:`.CommandInvokeError`\). - This makes it ideal for clean-up scenarios. - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the post-invoke hook. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. - """ - if not asyncio.iscoroutinefunction(coro): - raise TypeError('The post-invoke hook must be a coroutine.') - - self._after_invoke = coro - return coro - - # listener registration - - def add_listener(self, func: CoroFunc, /, name: str = MISSING) -> None: - """The non decorator alternative to :meth:`.listen`. - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - Parameters - ----------- - func: :ref:`coroutine ` - The function to call. - name: :class:`str` - The name of the event to listen for. Defaults to ``func.__name__``. - - Example - -------- - - .. code-block:: python3 - - async def on_ready(): pass - async def my_message(message): pass - - bot.add_listener(on_ready) - bot.add_listener(my_message, 'on_message') - - """ - name = func.__name__ if name is MISSING else name - - if not asyncio.iscoroutinefunction(func): - raise TypeError('Listeners must be coroutines') - - if name in self.extra_events: - self.extra_events[name].append(func) - else: - self.extra_events[name] = [func] - - def remove_listener(self, func: CoroFunc, /, name: str = MISSING) -> None: - """Removes a listener from the pool of listeners. - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - Parameters - ----------- - func - The function that was used as a listener to remove. - name: :class:`str` - The name of the event we want to remove. Defaults to - ``func.__name__``. - """ - - name = func.__name__ if name is MISSING else name - - if name in self.extra_events: - try: - self.extra_events[name].remove(func) - except ValueError: - pass - - def listen(self, name: str = MISSING) -> Callable[[CFT], CFT]: - """A decorator that registers another function as an external - event listener. Basically this allows you to listen to multiple - events from different places e.g. such as :func:`.on_ready` - - The functions being listened to must be a :ref:`coroutine `. - - Example - -------- - - .. code-block:: python3 - - @bot.listen() - async def on_message(message): - print('one') - - # in some other file... - - @bot.listen('on_message') - async def my_message(message): - print('two') - - Would print one and two in an unspecified order. - - Raises - ------- - TypeError - The function being listened to is not a coroutine. 
- """ - - def decorator(func: CFT) -> CFT: - self.add_listener(func, name) - return func - - return decorator - - # cogs - - async def add_cog( - self, - cog: Cog, - /, - *, - override: bool = False, - guild: Optional[Snowflake] = MISSING, - guilds: Sequence[Snowflake] = MISSING, - ) -> None: - """|coro| - - Adds a "cog" to the bot. - - A cog is a class that has its own event listeners and commands. - - If the cog is a :class:`.app_commands.Group` then it is added to - the bot's :class:`~discord.app_commands.CommandTree` as well. - - .. note:: - - Exceptions raised inside a :class:`.Cog`'s :meth:`~.Cog.cog_load` method will be - propagated to the caller. - - .. versionchanged:: 2.0 - - :exc:`.ClientException` is raised when a cog with the same name - is already loaded. - - .. versionchanged:: 2.0 - - ``cog`` parameter is now positional-only. - - .. versionchanged:: 2.0 - - This method is now a :term:`coroutine`. - - Parameters - ----------- - cog: :class:`.Cog` - The cog to register to the bot. - override: :class:`bool` - If a previously loaded cog with the same name should be ejected - instead of raising an error. - - .. versionadded:: 2.0 - guild: Optional[:class:`~discord.abc.Snowflake`] - If the cog is an application command group, then this would be the - guild where the cog group would be added to. If not given then - it becomes a global command instead. - - .. versionadded:: 2.0 - guilds: List[:class:`~discord.abc.Snowflake`] - If the cog is an application command group, then this would be the - guilds where the cog group would be added to. If not given then - it becomes a global command instead. Cannot be mixed with - ``guild``. - - .. versionadded:: 2.0 - - Raises - ------- - TypeError - The cog does not inherit from :class:`.Cog`. - CommandError - An error happened during loading. - ClientException - A cog with the same name is already loaded. - """ - - if not isinstance(cog, Cog): - raise TypeError('cogs must derive from Cog') - - cog_name = cog.__cog_name__ - existing = self.__cogs.get(cog_name) - - if existing is not None: - if not override: - raise discord.ClientException(f'Cog named {cog_name!r} already loaded') - await self.remove_cog(cog_name, guild=guild, guilds=guilds) - - if cog.__cog_app_commands_group__: - self.__tree.add_command(cog.__cog_app_commands_group__, override=override, guild=guild, guilds=guilds) - - cog = await cog._inject(self, override=override, guild=guild, guilds=guilds) - self.__cogs[cog_name] = cog - - def get_cog(self, name: str, /) -> Optional[Cog]: - """Gets the cog instance requested. - - If the cog is not found, ``None`` is returned instead. - - .. versionchanged:: 2.0 - - ``name`` parameter is now positional-only. - - Parameters - ----------- - name: :class:`str` - The name of the cog you are requesting. - This is equivalent to the name passed via keyword - argument in class creation or the class name if unspecified. - - Returns - -------- - Optional[:class:`Cog`] - The cog that was requested. If not found, returns ``None``. - """ - return self.__cogs.get(name) - - async def remove_cog( - self, - name: str, - /, - *, - guild: Optional[Snowflake] = MISSING, - guilds: Sequence[Snowflake] = MISSING, - ) -> Optional[Cog]: - """|coro| - - Removes a cog from the bot and returns it. - - All registered commands and event listeners that the - cog has registered will be removed as well. - - If no cog is found then this method has no effect. - - .. versionchanged:: 2.0 - - ``name`` parameter is now positional-only. - - .. 
versionchanged:: 2.0 - - This method is now a :term:`coroutine`. - - Parameters - ----------- - name: :class:`str` - The name of the cog to remove. - guild: Optional[:class:`~discord.abc.Snowflake`] - If the cog is an application command group, then this would be the - guild where the cog group would be removed from. If not given then - a global command is removed instead instead. - - .. versionadded:: 2.0 - guilds: List[:class:`~discord.abc.Snowflake`] - If the cog is an application command group, then this would be the - guilds where the cog group would be removed from. If not given then - a global command is removed instead instead. Cannot be mixed with - ``guild``. - - .. versionadded:: 2.0 - - Returns - ------- - Optional[:class:`.Cog`] - The cog that was removed. ``None`` if not found. - """ - - cog = self.__cogs.pop(name, None) - if cog is None: - return - - help_command = self._help_command - if help_command and help_command.cog is cog: - help_command.cog = None - - guild_ids = _retrieve_guild_ids(cog, guild, guilds) - if cog.__cog_app_commands_group__: - if guild_ids is None: - self.__tree.remove_command(name) - else: - for guild_id in guild_ids: - self.__tree.remove_command(name, guild=discord.Object(guild_id)) - - await cog._eject(self, guild_ids=guild_ids) - - return cog - - @property - def cogs(self) -> Mapping[str, Cog]: - """Mapping[:class:`str`, :class:`Cog`]: A read-only mapping of cog name to cog.""" - return types.MappingProxyType(self.__cogs) - - # extensions - - async def _remove_module_references(self, name: str) -> None: - # find all references to the module - # remove the cogs registered from the module - for cogname, cog in self.__cogs.copy().items(): - if _is_submodule(name, cog.__module__): - await self.remove_cog(cogname) - - # remove all the commands from the module - for cmd in self.all_commands.copy().values(): - if cmd.module is not None and _is_submodule(name, cmd.module): - if isinstance(cmd, GroupMixin): - cmd.recursively_remove_all_commands() - self.remove_command(cmd.name) - - # remove all the listeners from the module - for event_list in self.extra_events.copy().values(): - remove = [] - for index, event in enumerate(event_list): - if event.__module__ is not None and _is_submodule(name, event.__module__): - remove.append(index) - - for index in reversed(remove): - del event_list[index] - - # remove all relevant application commands from the tree - self.__tree._remove_with_module(name) - - async def _call_module_finalizers(self, lib: types.ModuleType, key: str) -> None: - try: - func = getattr(lib, 'teardown') - except AttributeError: - pass - else: - try: - await func(self) - except Exception: - pass - finally: - self.__extensions.pop(key, None) - sys.modules.pop(key, None) - name = lib.__name__ - for module in list(sys.modules.keys()): - if _is_submodule(name, module): - del sys.modules[module] - - async def _load_from_module_spec(self, spec: importlib.machinery.ModuleSpec, key: str) -> None: - # precondition: key not in self.__extensions - lib = importlib.util.module_from_spec(spec) - sys.modules[key] = lib - try: - spec.loader.exec_module(lib) # type: ignore - except Exception as e: - del sys.modules[key] - raise errors.ExtensionFailed(key, e) from e - - try: - setup = getattr(lib, 'setup') - except AttributeError: - del sys.modules[key] - raise errors.NoEntryPointError(key) - - try: - await setup(self) - except Exception as e: - del sys.modules[key] - await self._remove_module_references(lib.__name__) - await self._call_module_finalizers(lib, key) 
- raise errors.ExtensionFailed(key, e) from e - else: - self.__extensions[key] = lib - - def _resolve_name(self, name: str, package: Optional[str]) -> str: - try: - return importlib.util.resolve_name(name, package) - except ImportError: - raise errors.ExtensionNotFound(name) - - async def load_extension(self, name: str, *, package: Optional[str] = None) -> None: - """|coro| - - Loads an extension. - - An extension is a python module that contains commands, cogs, or - listeners. - - An extension must have a global function, ``setup`` defined as - the entry point on what to do when the extension is loaded. This entry - point must have a single argument, the ``bot``. - - .. versionchanged:: 2.0 - - This method is now a :term:`coroutine`. - - Parameters - ------------ - name: :class:`str` - The extension name to load. It must be dot separated like - regular Python imports if accessing a sub-module. e.g. - ``foo.test`` if you want to import ``foo/test.py``. - package: Optional[:class:`str`] - The package name to resolve relative imports with. - This is required when loading an extension using a relative path, e.g ``.foo.test``. - Defaults to ``None``. - - .. versionadded:: 1.7 - - Raises - -------- - ExtensionNotFound - The extension could not be imported. - This is also raised if the name of the extension could not - be resolved using the provided ``package`` parameter. - ExtensionAlreadyLoaded - The extension is already loaded. - NoEntryPointError - The extension does not have a setup function. - ExtensionFailed - The extension or its setup function had an execution error. - """ - - name = self._resolve_name(name, package) - if name in self.__extensions: - raise errors.ExtensionAlreadyLoaded(name) - - spec = importlib.util.find_spec(name) - if spec is None: - raise errors.ExtensionNotFound(name) - - await self._load_from_module_spec(spec, name) - - async def unload_extension(self, name: str, *, package: Optional[str] = None) -> None: - """|coro| - - Unloads an extension. - - When the extension is unloaded, all commands, listeners, and cogs are - removed from the bot and the module is un-imported. - - The extension can provide an optional global function, ``teardown``, - to do miscellaneous clean-up if necessary. This function takes a single - parameter, the ``bot``, similar to ``setup`` from - :meth:`~.Bot.load_extension`. - - .. versionchanged:: 2.0 - - This method is now a :term:`coroutine`. - - Parameters - ------------ - name: :class:`str` - The extension name to unload. It must be dot separated like - regular Python imports if accessing a sub-module. e.g. - ``foo.test`` if you want to import ``foo/test.py``. - package: Optional[:class:`str`] - The package name to resolve relative imports with. - This is required when unloading an extension using a relative path, e.g ``.foo.test``. - Defaults to ``None``. - - .. versionadded:: 1.7 - - Raises - ------- - ExtensionNotFound - The name of the extension could not - be resolved using the provided ``package`` parameter. - ExtensionNotLoaded - The extension was not loaded. - """ - - name = self._resolve_name(name, package) - lib = self.__extensions.get(name) - if lib is None: - raise errors.ExtensionNotLoaded(name) - - await self._remove_module_references(lib.__name__) - await self._call_module_finalizers(lib, name) - - async def reload_extension(self, name: str, *, package: Optional[str] = None) -> None: - """|coro| - - Atomically reloads an extension. - - This replaces the extension with the same extension, only refreshed. 
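``load_extension`` imports a module and calls its ``setup`` entry point, which typically registers cogs via ``add_cog``; ``unload_extension`` reverses this (optionally through ``teardown``). A sketch of such an extension module; the module path ``cogs.greetings`` and the command are illustrative:

.. code-block:: python3

    # cogs/greetings.py  (illustrative extension module)
    from discord.ext import commands

    class Greetings(commands.Cog):
        @commands.command()
        async def hello(self, ctx):
            await ctx.send(f'Hello {ctx.author.display_name}!')

    async def setup(bot):
        # Required entry point for load_extension, as documented above.
        await bot.add_cog(Greetings())

    # Elsewhere, inside the bot's async startup:
    #     await bot.load_extension('cogs.greetings')
    #     await bot.unload_extension('cogs.greetings')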
This is - equivalent to a :meth:`unload_extension` followed by a :meth:`load_extension` - except done in an atomic way. That is, if an operation fails mid-reload then - the bot will roll-back to the prior working state. - - Parameters - ------------ - name: :class:`str` - The extension name to reload. It must be dot separated like - regular Python imports if accessing a sub-module. e.g. - ``foo.test`` if you want to import ``foo/test.py``. - package: Optional[:class:`str`] - The package name to resolve relative imports with. - This is required when reloading an extension using a relative path, e.g ``.foo.test``. - Defaults to ``None``. - - .. versionadded:: 1.7 - - Raises - ------- - ExtensionNotLoaded - The extension was not loaded. - ExtensionNotFound - The extension could not be imported. - This is also raised if the name of the extension could not - be resolved using the provided ``package`` parameter. - NoEntryPointError - The extension does not have a setup function. - ExtensionFailed - The extension setup function had an execution error. - """ - - name = self._resolve_name(name, package) - lib = self.__extensions.get(name) - if lib is None: - raise errors.ExtensionNotLoaded(name) - - # get the previous module states from sys modules - # fmt: off - modules = { - name: module - for name, module in sys.modules.items() - if _is_submodule(lib.__name__, name) - } - # fmt: on - - try: - # Unload and then load the module... - await self._remove_module_references(lib.__name__) - await self._call_module_finalizers(lib, name) - await self.load_extension(name) - except Exception: - # if the load failed, the remnants should have been - # cleaned from the load_extension function call - # so let's load it from our old compiled library. - await lib.setup(self) - self.__extensions[name] = lib - - # revert sys.modules back to normal and raise back to caller - sys.modules.update(modules) - raise - - @property - def extensions(self) -> Mapping[str, types.ModuleType]: - """Mapping[:class:`str`, :class:`py:types.ModuleType`]: A read-only mapping of extension name to extension.""" - return types.MappingProxyType(self.__extensions) - - # help command stuff - - @property - def help_command(self) -> Optional[HelpCommand]: - return self._help_command - - @help_command.setter - def help_command(self, value: Optional[HelpCommand]) -> None: - if value is not None: - if not isinstance(value, HelpCommand): - raise TypeError('help_command must be a subclass of HelpCommand') - if self._help_command is not None: - self._help_command._remove_from_bot(self) - self._help_command = value - value._add_to_bot(self) - elif self._help_command is not None: - self._help_command._remove_from_bot(self) - self._help_command = None - else: - self._help_command = None - - # application command interop - - # As mentioned above, this is a mixin so the Self type hint fails here. - # However, since the only classes that can use this are subclasses of Client - # anyway, then this is sound. - @property - def tree(self) -> app_commands.CommandTree[Self]: # type: ignore - """:class:`~discord.app_commands.CommandTree`: The command tree responsible for handling the application commands - in this bot. - - .. versionadded:: 2.0 - """ - return self.__tree - - # command processing - - async def get_prefix(self, message: Message, /) -> Union[List[str], str]: - """|coro| - - Retrieves the prefix the bot is listening to - with the message as a context. - - .. versionchanged:: 2.0 - - ``message`` parameter is now positional-only. 
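The ``help_command`` setter accepts any ``HelpCommand`` subclass or ``None``, and ``tree`` exposes the application-command tree created in the constructor. A brief sketch; note that syncing the tree (``CommandTree.sync``) is not shown in this hunk and is mentioned only as an assumption about the wider API:

.. code-block:: python3

    import discord
    from discord.ext import commands

    bot = commands.Bot(command_prefix='!', intents=discord.Intents.default())

    bot.help_command = commands.MinimalHelpCommand()  # any HelpCommand subclass
    bot.help_command = None                           # disables the help command

    # Application commands live on bot.tree; pushing them to Discord is an
    # explicit, separate step (assumed API), e.g.:
    #     await bot.tree.sync()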
- - Parameters - ----------- - message: :class:`discord.Message` - The message context to get the prefix of. - - Returns - -------- - Union[List[:class:`str`], :class:`str`] - A list of prefixes or a single prefix that the bot is - listening for. - """ - prefix = ret = self.command_prefix - - if callable(prefix): - # self will be a Bot or AutoShardedBot - ret = await discord.utils.maybe_coroutine(prefix, self, message) # type: ignore - - if not isinstance(ret, str): - try: - ret = list(ret) # type: ignore - except TypeError: - # It's possible that a generator raised this exception. Don't - # replace it with our own error if that's the case. - if isinstance(ret, collections.abc.Iterable): - raise - - raise TypeError( - "command_prefix must be plain string, iterable of strings, or callable " - f"returning either of these, not {ret.__class__.__name__}" - ) - - return ret - - @overload - async def get_context( - self, - origin: Union[Message, Interaction], - /, - ) -> Context[Self]: # type: ignore - ... - - @overload - async def get_context( - self, - origin: Union[Message, Interaction], - /, - *, - cls: Type[ContextT], - ) -> ContextT: - ... - - async def get_context( - self, - origin: Union[Message, Interaction], - /, - *, - cls: Type[ContextT] = MISSING, - ) -> Any: - r"""|coro| - - Returns the invocation context from the message or interaction. - - This is a more low-level counter-part for :meth:`.process_commands` - to allow users more fine grained control over the processing. - - The returned context is not guaranteed to be a valid invocation - context, :attr:`.Context.valid` must be checked to make sure it is. - If the context is not valid then it is not a valid candidate to be - invoked under :meth:`~.Bot.invoke`. - - .. note:: - - In order for the custom context to be used inside an interaction-based - context (such as :class:`HybridCommand`) then this method must be - overridden to return that class. - - .. versionchanged:: 2.0 - - ``message`` parameter is now positional-only and renamed to ``origin``. - - Parameters - ----------- - origin: Union[:class:`discord.Message`, :class:`discord.Interaction`] - The message or interaction to get the invocation context from. - cls - The factory class that will be used to create the context. - By default, this is :class:`.Context`. Should a custom - class be provided, it must be similar enough to :class:`.Context`\'s - interface. - - Returns - -------- - :class:`.Context` - The invocation context. The type of this can change via the - ``cls`` parameter. - """ - if cls is MISSING: - cls = Context # type: ignore - - if isinstance(origin, discord.Interaction): - return await cls.from_interaction(origin) - - view = StringView(origin.content) - ctx = cls(prefix=None, view=view, bot=self, message=origin) - - if origin.author.id == self.user.id: # type: ignore - return ctx - - prefix = await self.get_prefix(origin) - invoked_prefix = prefix - - if isinstance(prefix, str): - if not view.skip_string(prefix): - return ctx - else: - try: - # if the context class' __init__ consumes something from the view this - # will be wrong. That seems unreasonable though. - if origin.content.startswith(tuple(prefix)): - invoked_prefix = discord.utils.find(view.skip_string, prefix) - else: - return ctx - - except TypeError: - if not isinstance(prefix, list): - raise TypeError( - "get_prefix must return either a string or a list of string, " f"not {prefix.__class__.__name__}" - ) - - # It's possible a bad command_prefix got us here. 
- for value in prefix: - if not isinstance(value, str): - raise TypeError( - "Iterable command_prefix or list returned from get_prefix must " - f"contain only strings, not {value.__class__.__name__}" - ) - - # Getting here shouldn't happen - raise - - if self.strip_after_prefix: - view.skip_ws() - - invoker = view.get_word() - ctx.invoked_with = invoker - # type-checker fails to narrow invoked_prefix type. - ctx.prefix = invoked_prefix # type: ignore - ctx.command = self.all_commands.get(invoker) - return ctx - - async def invoke(self, ctx: Context[BotT], /) -> None: - """|coro| - - Invokes the command given under the invocation context and - handles all the internal event dispatch mechanisms. - - .. versionchanged:: 2.0 - - ``ctx`` parameter is now positional-only. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context to invoke. - """ - if ctx.command is not None: - self.dispatch('command', ctx) - try: - if await self.can_run(ctx, call_once=True): - await ctx.command.invoke(ctx) - else: - raise errors.CheckFailure('The global check once functions failed.') - except errors.CommandError as exc: - await ctx.command.dispatch_error(ctx, exc) - else: - self.dispatch('command_completion', ctx) - elif ctx.invoked_with: - exc = errors.CommandNotFound(f'Command "{ctx.invoked_with}" is not found') - self.dispatch('command_error', ctx, exc) - - async def process_commands(self, message: Message, /) -> None: - """|coro| - - This function processes the commands that have been registered - to the bot and other groups. Without this coroutine, none of the - commands will be triggered. - - By default, this coroutine is called inside the :func:`.on_message` - event. If you choose to override the :func:`.on_message` event, then - you should invoke this coroutine as well. - - This is built using other low level tools, and is equivalent to a - call to :meth:`~.Bot.get_context` followed by a call to :meth:`~.Bot.invoke`. - - This also checks if the message's author is a bot and doesn't - call :meth:`~.Bot.get_context` or :meth:`~.Bot.invoke` if so. - - .. versionchanged:: 2.0 - - ``message`` parameter is now positional-only. - - Parameters - ----------- - message: :class:`discord.Message` - The message to process commands for. - """ - if message.author.bot: - return - - ctx = await self.get_context(message) - # the type of the invocation context's bot attribute will be correct - await self.invoke(ctx) # type: ignore - - async def on_message(self, message: Message, /) -> None: - await self.process_commands(message) - - -class Bot(BotBase, discord.Client): - """Represents a Discord bot. - - This class is a subclass of :class:`discord.Client` and as a result - anything that you can do with a :class:`discord.Client` you can do with - this bot. - - This class also subclasses :class:`.GroupMixin` to provide the functionality - to manage commands. - - Unlike :class:`discord.Client`, this class does not require manually setting - a :class:`~discord.app_commands.CommandTree` and is automatically set upon - instantiating the class. - - .. container:: operations - - .. describe:: async with x - - Asynchronously initialises the bot and automatically cleans up. - - .. versionadded:: 2.0 - - Attributes - ----------- - command_prefix - The command prefix is what the message content must contain initially - to have a command invoked. 
This prefix could either be a string to - indicate what the prefix should be, or a callable that takes in the bot - as its first parameter and :class:`discord.Message` as its second - parameter and returns the prefix. This is to facilitate "dynamic" - command prefixes. This callable can be either a regular function or - a coroutine. - - An empty string as the prefix always matches, enabling prefix-less - command invocation. While this may be useful in DMs it should be avoided - in servers, as it's likely to cause performance issues and unintended - command invocations. - - The command prefix could also be an iterable of strings indicating that - multiple checks for the prefix should be used and the first one to - match will be the invocation prefix. You can get this prefix via - :attr:`.Context.prefix`. - - .. note:: - - When passing multiple prefixes be careful to not pass a prefix - that matches a longer prefix occurring later in the sequence. For - example, if the command prefix is ``('!', '!?')`` the ``'!?'`` - prefix will never be matched to any message as the previous one - matches messages starting with ``!?``. This is especially important - when passing an empty string, it should always be last as no prefix - after it will be matched. - case_insensitive: :class:`bool` - Whether the commands should be case insensitive. Defaults to ``False``. This - attribute does not carry over to groups. You must set it to every group if - you require group commands to be case insensitive as well. - description: :class:`str` - The content prefixed into the default help message. - help_command: Optional[:class:`.HelpCommand`] - The help command implementation to use. This can be dynamically - set at runtime. To remove the help command pass ``None``. For more - information on implementing a help command, see :ref:`ext_commands_help_command`. - owner_id: Optional[:class:`int`] - The user ID that owns the bot. If this is not set and is then queried via - :meth:`.is_owner` then it is fetched automatically using - :meth:`~.Bot.application_info`. - owner_ids: Optional[Collection[:class:`int`]] - The user IDs that owns the bot. This is similar to :attr:`owner_id`. - If this is not set and the application is team based, then it is - fetched automatically using :meth:`~.Bot.application_info`. - For performance reasons it is recommended to use a :class:`set` - for the collection. You cannot set both ``owner_id`` and ``owner_ids``. - - .. versionadded:: 1.3 - strip_after_prefix: :class:`bool` - Whether to strip whitespace characters after encountering the command - prefix. This allows for ``! hello`` and ``!hello`` to both work if - the ``command_prefix`` is set to ``!``. Defaults to ``False``. - - .. versionadded:: 1.7 - tree_cls: Type[:class:`~discord.app_commands.CommandTree`] - The type of application command tree to use. Defaults to :class:`~discord.app_commands.CommandTree`. - - .. versionadded:: 2.0 - """ - - pass - - -class AutoShardedBot(BotBase, discord.AutoShardedClient): - """This is similar to :class:`.Bot` except that it is inherited from - :class:`discord.AutoShardedClient` instead. - - .. container:: operations - - .. describe:: async with x - - Asynchronously initialises the bot and automatically cleans. - - .. 
versionadded:: 2.0 - """ - - pass diff --git a/.venv/Lib/site-packages/discord/ext/commands/cog.py b/.venv/Lib/site-packages/discord/ext/commands/cog.py deleted file mode 100644 index 319f85b..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/cog.py +++ /dev/null @@ -1,787 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" -from __future__ import annotations - -import inspect -import discord -from discord import app_commands -from discord.utils import maybe_coroutine, _to_kebab_case - -from typing import ( - Any, - Callable, - ClassVar, - Coroutine, - Dict, - Generator, - Iterable, - List, - Optional, - TYPE_CHECKING, - Sequence, - Tuple, - TypeVar, - Union, -) - -from ._types import _BaseCommand, BotT - -if TYPE_CHECKING: - from typing_extensions import Self - from discord.abc import Snowflake - from discord._types import ClientT - - from .bot import BotBase - from .context import Context - from .core import Command - -__all__ = ( - 'CogMeta', - 'Cog', - 'GroupCog', -) - -FuncT = TypeVar('FuncT', bound=Callable[..., Any]) - -MISSING: Any = discord.utils.MISSING - - -class CogMeta(type): - """A metaclass for defining a cog. - - Note that you should probably not use this directly. It is exposed - purely for documentation purposes along with making custom metaclasses to intermix - with other metaclasses such as the :class:`abc.ABCMeta` metaclass. - - For example, to create an abstract cog mixin class, the following would be done. - - .. code-block:: python3 - - import abc - - class CogABCMeta(commands.CogMeta, abc.ABCMeta): - pass - - class SomeMixin(metaclass=abc.ABCMeta): - pass - - class SomeCogMixin(SomeMixin, commands.Cog, metaclass=CogABCMeta): - pass - - .. note:: - - When passing an attribute of a metaclass that is documented below, note - that you must pass it as a keyword-only argument to the class creation - like the following example: - - .. code-block:: python3 - - class MyCog(commands.Cog, name='My Cog'): - pass - - Attributes - ----------- - name: :class:`str` - The cog name. By default, it is the name of the class with no modification. - description: :class:`str` - The cog description. By default, it is the cleaned docstring of the class. - - .. versionadded:: 1.6 - - command_attrs: :class:`dict` - A list of attributes to apply to every command inside this cog. The dictionary - is passed into the :class:`Command` options at ``__init__``. 
- If you specify attributes inside the command attribute in the class, it will - override the one specified inside this attribute. For example: - - .. code-block:: python3 - - class MyCog(commands.Cog, command_attrs=dict(hidden=True)): - @commands.command() - async def foo(self, ctx): - pass # hidden -> True - - @commands.command(hidden=False) - async def bar(self, ctx): - pass # hidden -> False - - group_name: Union[:class:`str`, :class:`~discord.app_commands.locale_str`] - The group name of a cog. This is only applicable for :class:`GroupCog` instances. - By default, it's the same value as :attr:`name`. - - .. versionadded:: 2.0 - group_description: Union[:class:`str`, :class:`~discord.app_commands.locale_str`] - The group description of a cog. This is only applicable for :class:`GroupCog` instances. - By default, it's the same value as :attr:`description`. - - .. versionadded:: 2.0 - group_nsfw: :class:`bool` - Whether the application command group is NSFW. This is only applicable for :class:`GroupCog` instances. - By default, it's ``False``. - - .. versionadded:: 2.0 - group_auto_locale_strings: :class:`bool` - If this is set to ``True``, then all translatable strings will implicitly - be wrapped into :class:`~discord.app_commands.locale_str` rather - than :class:`str`. Defaults to ``True``. - - .. versionadded:: 2.0 - group_extras: :class:`dict` - A dictionary that can be used to store extraneous data. - This is only applicable for :class:`GroupCog` instances. - The library will not touch any values or keys within this dictionary. - - .. versionadded:: 2.1 - """ - - __cog_name__: str - __cog_description__: str - __cog_group_name__: Union[str, app_commands.locale_str] - __cog_group_description__: Union[str, app_commands.locale_str] - __cog_group_nsfw__: bool - __cog_group_auto_locale_strings__: bool - __cog_group_extras__: Dict[Any, Any] - __cog_settings__: Dict[str, Any] - __cog_commands__: List[Command[Any, ..., Any]] - __cog_app_commands__: List[Union[app_commands.Group, app_commands.Command[Any, ..., Any]]] - __cog_listeners__: List[Tuple[str, str]] - - def __new__(cls, *args: Any, **kwargs: Any) -> Self: - name, bases, attrs = args - if any(issubclass(base, app_commands.Group) for base in bases): - raise TypeError( - 'Cannot inherit from app_commands.Group with commands.Cog, consider using commands.GroupCog instead' - ) - - # If name='...' is given but not group_name='...' then name='...' is used for both. 
- # If neither is given then cog name is the class name but group name is kebab case - try: - cog_name = kwargs.pop('name') - except KeyError: - cog_name = name - try: - group_name = kwargs.pop('group_name') - except KeyError: - group_name = _to_kebab_case(name) - else: - group_name = kwargs.pop('group_name', cog_name) - - attrs['__cog_settings__'] = kwargs.pop('command_attrs', {}) - attrs['__cog_name__'] = cog_name - attrs['__cog_group_name__'] = group_name - attrs['__cog_group_nsfw__'] = kwargs.pop('group_nsfw', False) - attrs['__cog_group_auto_locale_strings__'] = kwargs.pop('group_auto_locale_strings', True) - attrs['__cog_group_extras__'] = kwargs.pop('group_extras', {}) - - description = kwargs.pop('description', None) - if description is None: - description = inspect.cleandoc(attrs.get('__doc__', '')) - - attrs['__cog_description__'] = description - attrs['__cog_group_description__'] = kwargs.pop('group_description', description or '\u2026') - - commands = {} - cog_app_commands = {} - listeners = {} - no_bot_cog = 'Commands or listeners must not start with cog_ or bot_ (in method {0.__name__}.{1})' - - new_cls = super().__new__(cls, name, bases, attrs, **kwargs) - for base in reversed(new_cls.__mro__): - for elem, value in base.__dict__.items(): - if elem in commands: - del commands[elem] - if elem in listeners: - del listeners[elem] - - is_static_method = isinstance(value, staticmethod) - if is_static_method: - value = value.__func__ - if isinstance(value, _BaseCommand): - if is_static_method: - raise TypeError(f'Command in method {base}.{elem!r} must not be staticmethod.') - if elem.startswith(('cog_', 'bot_')): - raise TypeError(no_bot_cog.format(base, elem)) - commands[elem] = value - elif isinstance(value, (app_commands.Group, app_commands.Command)) and value.parent is None: - if is_static_method: - raise TypeError(f'Command in method {base}.{elem!r} must not be staticmethod.') - if elem.startswith(('cog_', 'bot_')): - raise TypeError(no_bot_cog.format(base, elem)) - cog_app_commands[elem] = value - elif inspect.iscoroutinefunction(value): - try: - getattr(value, '__cog_listener__') - except AttributeError: - continue - else: - if elem.startswith(('cog_', 'bot_')): - raise TypeError(no_bot_cog.format(base, elem)) - listeners[elem] = value - - new_cls.__cog_commands__ = list(commands.values()) # this will be copied in Cog.__new__ - new_cls.__cog_app_commands__ = list(cog_app_commands.values()) - - listeners_as_list = [] - for listener in listeners.values(): - for listener_name in listener.__cog_listener_names__: - # I use __name__ instead of just storing the value so I can inject - # the self attribute when the time comes to add them to the bot - listeners_as_list.append((listener_name, listener.__name__)) - - new_cls.__cog_listeners__ = listeners_as_list - return new_cls - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args) - - @classmethod - def qualified_name(cls) -> str: - return cls.__cog_name__ - - -def _cog_special_method(func: FuncT) -> FuncT: - func.__cog_special_method__ = None - return func - - -class Cog(metaclass=CogMeta): - """The base class that all cogs must inherit from. - - A cog is a collection of commands, listeners, and optional state to - help group commands together. More information on them can be found on - the :ref:`ext_commands_cogs` page. - - When inheriting from this class, the options shown in :class:`CogMeta` - are equally valid here. 
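To make the above concrete, a minimal sketch of a cog that bundles a prefix command and a listener; the cog and command names are hypothetical.

.. code-block:: python3

    import discord
    from discord.ext import commands

    class Moderation(commands.Cog, name='Moderation'):
        def __init__(self, bot: commands.Bot):
            self.bot = bot

        @commands.command()
        async def ping(self, ctx: commands.Context):
            await ctx.send('pong')

        @commands.Cog.listener()
        async def on_member_join(self, member: discord.Member):
            print(f'{member} joined {member.guild}')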
- """ - - __cog_name__: str - __cog_description__: str - __cog_group_name__: Union[str, app_commands.locale_str] - __cog_group_description__: Union[str, app_commands.locale_str] - __cog_settings__: Dict[str, Any] - __cog_commands__: List[Command[Self, ..., Any]] - __cog_app_commands__: List[Union[app_commands.Group, app_commands.Command[Self, ..., Any]]] - __cog_listeners__: List[Tuple[str, str]] - __cog_is_app_commands_group__: ClassVar[bool] = False - __cog_app_commands_group__: Optional[app_commands.Group] - __discord_app_commands_error_handler__: Optional[ - Callable[[discord.Interaction, app_commands.AppCommandError], Coroutine[Any, Any, None]] - ] - - def __new__(cls, *args: Any, **kwargs: Any) -> Self: - # For issue 426, we need to store a copy of the command objects - # since we modify them to inject `self` to them. - # To do this, we need to interfere with the Cog creation process. - self = super().__new__(cls) - cmd_attrs = cls.__cog_settings__ - - # Either update the command with the cog provided defaults or copy it. - # r.e type ignore, type-checker complains about overriding a ClassVar - self.__cog_commands__ = tuple(c._update_copy(cmd_attrs) for c in cls.__cog_commands__) # type: ignore - - lookup = {cmd.qualified_name: cmd for cmd in self.__cog_commands__} - - # Register the application commands - children: List[Union[app_commands.Group, app_commands.Command[Self, ..., Any]]] = [] - - if cls.__cog_is_app_commands_group__: - group = app_commands.Group( - name=cls.__cog_group_name__, - description=cls.__cog_group_description__, - nsfw=cls.__cog_group_nsfw__, - auto_locale_strings=cls.__cog_group_auto_locale_strings__, - parent=None, - guild_ids=getattr(cls, '__discord_app_commands_default_guilds__', None), - guild_only=getattr(cls, '__discord_app_commands_guild_only__', False), - default_permissions=getattr(cls, '__discord_app_commands_default_permissions__', None), - extras=cls.__cog_group_extras__, - ) - else: - group = None - - self.__cog_app_commands_group__ = group - - # Update the Command instances dynamically as well - for command in self.__cog_commands__: - setattr(self, command.callback.__name__, command) - parent = command.parent - if parent is not None: - # Get the latest parent reference - parent = lookup[parent.qualified_name] # type: ignore - - # Update our parent's reference to our self - parent.remove_command(command.name) # type: ignore - parent.add_command(command) # type: ignore - - if hasattr(command, '__commands_is_hybrid__') and parent is None: - app_command: Optional[Union[app_commands.Group, app_commands.Command[Self, ..., Any]]] = getattr( - command, 'app_command', None - ) - if app_command: - group_parent = self.__cog_app_commands_group__ - app_command = app_command._copy_with(parent=group_parent, binding=self) - # The type checker does not see the app_command attribute even though it exists - command.app_command = app_command # type: ignore - - if self.__cog_app_commands_group__: - children.append(app_command) # type: ignore # Somehow it thinks it can be None here - - if Cog._get_overridden_method(self.cog_app_command_error) is not None: - error_handler = self.cog_app_command_error - else: - error_handler = None - - self.__discord_app_commands_error_handler__ = error_handler - - for command in cls.__cog_app_commands__: - copy = command._copy_with(parent=self.__cog_app_commands_group__, binding=self) - - # Update set bindings - if copy._attr: - setattr(self, copy._attr, copy) - - if isinstance(copy, app_commands.Group): - 
copy.__discord_app_commands_error_handler__ = error_handler - for command in copy._children.values(): - if isinstance(command, app_commands.Group): - command.__discord_app_commands_error_handler__ = error_handler - - children.append(copy) - - self.__cog_app_commands__ = children - if self.__cog_app_commands_group__: - self.__cog_app_commands_group__.module = cls.__module__ - mapping = {cmd.name: cmd for cmd in children} - if len(mapping) > 25: - raise TypeError('maximum number of application command children exceeded') - - self.__cog_app_commands_group__._children = mapping # type: ignore # Variance issue - - return self - - def get_commands(self) -> List[Command[Self, ..., Any]]: - r"""Returns the commands that are defined inside this cog. - - This does *not* include :class:`discord.app_commands.Command` or :class:`discord.app_commands.Group` - instances. - - Returns - -------- - List[:class:`.Command`] - A :class:`list` of :class:`.Command`\s that are - defined inside this cog, not including subcommands. - """ - return [c for c in self.__cog_commands__ if c.parent is None] - - def get_app_commands(self) -> List[Union[app_commands.Command[Self, ..., Any], app_commands.Group]]: - r"""Returns the app commands that are defined inside this cog. - - Returns - -------- - List[Union[:class:`discord.app_commands.Command`, :class:`discord.app_commands.Group`]] - A :class:`list` of :class:`discord.app_commands.Command`\s and :class:`discord.app_commands.Group`\s that are - defined inside this cog, not including subcommands. - """ - return [c for c in self.__cog_app_commands__ if c.parent is None] - - @property - def qualified_name(self) -> str: - """:class:`str`: Returns the cog's specified name, not the class name.""" - return self.__cog_name__ - - @property - def description(self) -> str: - """:class:`str`: Returns the cog's description, typically the cleaned docstring.""" - return self.__cog_description__ - - @description.setter - def description(self, description: str) -> None: - self.__cog_description__ = description - - def walk_commands(self) -> Generator[Command[Self, ..., Any], None, None]: - """An iterator that recursively walks through this cog's commands and subcommands. - - Yields - ------ - Union[:class:`.Command`, :class:`.Group`] - A command or group from the cog. - """ - from .core import GroupMixin - - for command in self.__cog_commands__: - if command.parent is None: - yield command - if isinstance(command, GroupMixin): - yield from command.walk_commands() - - def walk_app_commands(self) -> Generator[Union[app_commands.Command[Self, ..., Any], app_commands.Group], None, None]: - """An iterator that recursively walks through this cog's app commands and subcommands. - - Yields - ------ - Union[:class:`discord.app_commands.Command`, :class:`discord.app_commands.Group`] - An app command or group from the cog. - """ - for command in self.__cog_app_commands__: - yield command - if isinstance(command, app_commands.Group): - yield from command.walk_commands() - - @property - def app_command(self) -> Optional[app_commands.Group]: - """Optional[:class:`discord.app_commands.Group`]: Returns the associated group with this cog. - - This is only available if inheriting from :class:`GroupCog`. - """ - return self.__cog_app_commands_group__ - - def get_listeners(self) -> List[Tuple[str, Callable[..., Any]]]: - """Returns a :class:`list` of (name, function) listener pairs that are defined in this cog. 
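A small sketch of the introspection helpers above (``walk_commands`` and ``get_listeners``), assuming ``cog`` is an already-constructed cog instance such as the ``Moderation`` example earlier:

.. code-block:: python3

    # assumes `cog` is a commands.Cog instance, e.g. Moderation(bot)
    for command in cog.walk_commands():
        print('command:', command.qualified_name)

    for event_name, callback in cog.get_listeners():
        print('listener:', event_name, '->', callback.__name__)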
- - Returns - -------- - List[Tuple[:class:`str`, :ref:`coroutine `]] - The listeners defined in this cog. - """ - return [(name, getattr(self, method_name)) for name, method_name in self.__cog_listeners__] - - @classmethod - def _get_overridden_method(cls, method: FuncT) -> Optional[FuncT]: - """Return None if the method is not overridden. Otherwise returns the overridden method.""" - return getattr(method.__func__, '__cog_special_method__', method) - - @classmethod - def listener(cls, name: str = MISSING) -> Callable[[FuncT], FuncT]: - """A decorator that marks a function as a listener. - - This is the cog equivalent of :meth:`.Bot.listen`. - - Parameters - ------------ - name: :class:`str` - The name of the event being listened to. If not provided, it - defaults to the function's name. - - Raises - -------- - TypeError - The function is not a coroutine function or a string was not passed as - the name. - """ - - if name is not MISSING and not isinstance(name, str): - raise TypeError(f'Cog.listener expected str but received {name.__class__.__name__} instead.') - - def decorator(func: FuncT) -> FuncT: - actual = func - if isinstance(actual, staticmethod): - actual = actual.__func__ - if not inspect.iscoroutinefunction(actual): - raise TypeError('Listener function must be a coroutine function.') - actual.__cog_listener__ = True - to_assign = name or actual.__name__ - try: - actual.__cog_listener_names__.append(to_assign) - except AttributeError: - actual.__cog_listener_names__ = [to_assign] - # we have to return `func` instead of `actual` because - # we need the type to be `staticmethod` for the metaclass - # to pick it up but the metaclass unfurls the function and - # thus the assignments need to be on the actual function - return func - - return decorator - - def has_error_handler(self) -> bool: - """:class:`bool`: Checks whether the cog has an error handler. - - .. versionadded:: 1.7 - """ - return not hasattr(self.cog_command_error.__func__, '__cog_special_method__') - - def has_app_command_error_handler(self) -> bool: - """:class:`bool`: Checks whether the cog has an app error handler. - - .. versionadded:: 2.1 - """ - return not hasattr(self.cog_app_command_error.__func__, '__cog_special_method__') - - @_cog_special_method - async def cog_load(self) -> None: - """|maybecoro| - - A special method that is called when the cog gets loaded. - - Subclasses must replace this if they want special asynchronous loading behaviour. - Note that the ``__init__`` special method does not allow asynchronous code to run - inside it, thus this is helpful for setting up code that needs to be asynchronous. - - .. versionadded:: 2.0 - """ - pass - - @_cog_special_method - async def cog_unload(self) -> None: - """|maybecoro| - - A special method that is called when the cog gets removed. - - Subclasses must replace this if they want special unloading behaviour. - - Exceptions raised in this method are ignored during extension unloading. - - .. versionchanged:: 2.0 - - This method can now be a :term:`coroutine`. - """ - pass - - @_cog_special_method - def bot_check_once(self, ctx: Context[BotT]) -> bool: - """A special method that registers as a :meth:`.Bot.check_once` - check. - - This function **can** be a coroutine and must take a sole parameter, - ``ctx``, to represent the :class:`.Context`. - """ - return True - - @_cog_special_method - def bot_check(self, ctx: Context[BotT]) -> bool: - """A special method that registers as a :meth:`.Bot.check` - check. 
- - This function **can** be a coroutine and must take a sole parameter, - ``ctx``, to represent the :class:`.Context`. - """ - return True - - @_cog_special_method - def cog_check(self, ctx: Context[BotT]) -> bool: - """A special method that registers as a :func:`~discord.ext.commands.check` - for every command and subcommand in this cog. - - This function **can** be a coroutine and must take a sole parameter, - ``ctx``, to represent the :class:`.Context`. - """ - return True - - @_cog_special_method - def interaction_check(self, interaction: discord.Interaction[ClientT], /) -> bool: - """A special method that registers as a :func:`discord.app_commands.check` - for every app command and subcommand in this cog. - - This function **can** be a coroutine and must take a sole parameter, - ``interaction``, to represent the :class:`~discord.Interaction`. - - .. versionadded:: 2.0 - """ - return True - - @_cog_special_method - async def cog_command_error(self, ctx: Context[BotT], error: Exception) -> None: - """|coro| - - A special method that is called whenever an error - is dispatched inside this cog. - - This is similar to :func:`.on_command_error` except only applying - to the commands inside this cog. - - This **must** be a coroutine. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context where the error happened. - error: :class:`CommandError` - The error that happened. - """ - pass - - @_cog_special_method - async def cog_app_command_error(self, interaction: discord.Interaction, error: app_commands.AppCommandError) -> None: - """|coro| - - A special method that is called whenever an error within - an application command is dispatched inside this cog. - - This is similar to :func:`discord.app_commands.CommandTree.on_error` except - only applying to the application commands inside this cog. - - This **must** be a coroutine. - - Parameters - ----------- - interaction: :class:`~discord.Interaction` - The interaction that is being handled. - error: :exc:`~discord.app_commands.AppCommandError` - The exception that was raised. - """ - pass - - @_cog_special_method - async def cog_before_invoke(self, ctx: Context[BotT]) -> None: - """|coro| - - A special method that acts as a cog local pre-invoke hook. - - This is similar to :meth:`.Command.before_invoke`. - - This **must** be a coroutine. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context. - """ - pass - - @_cog_special_method - async def cog_after_invoke(self, ctx: Context[BotT]) -> None: - """|coro| - - A special method that acts as a cog local post-invoke hook. - - This is similar to :meth:`.Command.after_invoke`. - - This **must** be a coroutine. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context. - """ - pass - - async def _inject(self, bot: BotBase, override: bool, guild: Optional[Snowflake], guilds: Sequence[Snowflake]) -> Self: - cls = self.__class__ - - # we'll call this first so that errors can propagate without - # having to worry about undoing anything - await maybe_coroutine(self.cog_load) - - # realistically, the only thing that can cause loading errors - # is essentially just the command loading, which raises if there are - # duplicates. When this condition is met, we want to undo all what - # we've added so far for some form of atomic loading. 
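The cog-local check and error handler described above can be sketched as follows; the guild-only rule and the ``announce`` command are purely illustrative.

.. code-block:: python3

    from discord.ext import commands

    class Admin(commands.Cog):
        def cog_check(self, ctx: commands.Context) -> bool:
            # applied to every command in this cog
            return ctx.guild is not None

        async def cog_command_error(self, ctx: commands.Context, error: commands.CommandError) -> None:
            await ctx.send(f'Admin command failed: {error}')

        @commands.command()
        async def announce(self, ctx: commands.Context, *, message: str):
            await ctx.send(message)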
- for index, command in enumerate(self.__cog_commands__): - command.cog = self - if command.parent is None: - try: - bot.add_command(command) - except Exception as e: - # undo our additions - for to_undo in self.__cog_commands__[:index]: - if to_undo.parent is None: - bot.remove_command(to_undo.name) - try: - await maybe_coroutine(self.cog_unload) - finally: - raise e - - # check if we're overriding the default - if cls.bot_check is not Cog.bot_check: - bot.add_check(self.bot_check) - - if cls.bot_check_once is not Cog.bot_check_once: - bot.add_check(self.bot_check_once, call_once=True) - - # while Bot.add_listener can raise if it's not a coroutine, - # this precondition is already met by the listener decorator - # already, thus this should never raise. - # Outside of, memory errors and the like... - for name, method_name in self.__cog_listeners__: - bot.add_listener(getattr(self, method_name), name) - - # Only do this if these are "top level" commands - if not self.__cog_app_commands_group__: - for command in self.__cog_app_commands__: - # This is already atomic - bot.tree.add_command(command, override=override, guild=guild, guilds=guilds) - - return self - - async def _eject(self, bot: BotBase, guild_ids: Optional[Iterable[int]]) -> None: - cls = self.__class__ - - try: - for command in self.__cog_commands__: - if command.parent is None: - bot.remove_command(command.name) - - if not self.__cog_app_commands_group__: - for command in self.__cog_app_commands__: - guild_ids = guild_ids or command._guild_ids - if guild_ids is None: - bot.tree.remove_command(command.name) - else: - for guild_id in guild_ids: - bot.tree.remove_command(command.name, guild=discord.Object(id=guild_id)) - - for name, method_name in self.__cog_listeners__: - bot.remove_listener(getattr(self, method_name), name) - - if cls.bot_check is not Cog.bot_check: - bot.remove_check(self.bot_check) - - if cls.bot_check_once is not Cog.bot_check_once: - bot.remove_check(self.bot_check_once, call_once=True) - finally: - try: - await maybe_coroutine(self.cog_unload) - except Exception: - pass - - -class GroupCog(Cog): - """Represents a cog that also doubles as a parent :class:`discord.app_commands.Group` for - the application commands defined within it. - - This inherits from :class:`Cog` and the options in :class:`CogMeta` also apply to this. - See the :class:`Cog` documentation for methods. - - Decorators such as :func:`~discord.app_commands.guild_only`, :func:`~discord.app_commands.guilds`, - and :func:`~discord.app_commands.default_permissions` will apply to the group if used on top of the - cog. - - Hybrid commands will also be added to the Group, giving the ability to categorize slash commands into - groups, while keeping the prefix-style command as a root-level command. - - For example: - - .. code-block:: python3 - - from discord import app_commands - from discord.ext import commands - - @app_commands.guild_only() - class MyCog(commands.GroupCog, group_name='my-cog'): - pass - - .. 
versionadded:: 2.0 - """ - - __cog_is_app_commands_group__: ClassVar[bool] = True diff --git a/.venv/Lib/site-packages/discord/ext/commands/context.py b/.venv/Lib/site-packages/discord/ext/commands/context.py deleted file mode 100644 index 40ef48c..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/context.py +++ /dev/null @@ -1,1065 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" -from __future__ import annotations - -import re -from typing import TYPE_CHECKING, Any, Dict, Generator, Generic, List, Optional, TypeVar, Union, Sequence, Type, overload - -import discord.abc -import discord.utils -from discord import Interaction, Message, Attachment, MessageType, User, PartialMessageable, Permissions, ChannelType, Thread -from discord.context_managers import Typing -from .view import StringView - -from ._types import BotT - -if TYPE_CHECKING: - from typing_extensions import Self, ParamSpec, TypeGuard - - from discord.abc import MessageableChannel - from discord.guild import Guild - from discord.member import Member - from discord.state import ConnectionState - from discord.user import ClientUser - from discord.voice_client import VoiceProtocol - from discord.embeds import Embed - from discord.file import File - from discord.mentions import AllowedMentions - from discord.sticker import GuildSticker, StickerItem - from discord.message import MessageReference, PartialMessage - from discord.ui import View - from discord.types.interactions import ApplicationCommandInteractionData - - from .cog import Cog - from .core import Command - from .parameters import Parameter - - from types import TracebackType - - BE = TypeVar('BE', bound=BaseException) - -# fmt: off -__all__ = ( - 'Context', -) -# fmt: on - -MISSING: Any = discord.utils.MISSING - - -T = TypeVar('T') -CogT = TypeVar('CogT', bound="Cog") - -if TYPE_CHECKING: - P = ParamSpec('P') -else: - P = TypeVar('P') - - -def is_cog(obj: Any) -> TypeGuard[Cog]: - return hasattr(obj, '__cog_commands__') - - -class DeferTyping: - def __init__(self, ctx: Context[BotT], *, ephemeral: bool): - self.ctx: Context[BotT] = ctx - self.ephemeral: bool = ephemeral - - def __await__(self) -> Generator[Any, None, None]: - return self.ctx.defer(ephemeral=self.ephemeral).__await__() - - async def __aenter__(self) -> None: - await self.ctx.defer(ephemeral=self.ephemeral) - - async def __aexit__( - self, - exc_type: Optional[Type[BE]], - exc: Optional[BE], - traceback: Optional[TracebackType], - ) -> None: - 
pass - - -class Context(discord.abc.Messageable, Generic[BotT]): - r"""Represents the context in which a command is being invoked under. - - This class contains a lot of meta data to help you understand more about - the invocation context. This class is not created manually and is instead - passed around to commands as the first parameter. - - This class implements the :class:`~discord.abc.Messageable` ABC. - - Attributes - ----------- - message: :class:`.Message` - The message that triggered the command being executed. - - .. note:: - - In the case of an interaction based context, this message is "synthetic" - and does not actually exist. Therefore, the ID on it is invalid similar - to ephemeral messages. - bot: :class:`.Bot` - The bot that contains the command being executed. - args: :class:`list` - The list of transformed arguments that were passed into the command. - If this is accessed during the :func:`.on_command_error` event - then this list could be incomplete. - kwargs: :class:`dict` - A dictionary of transformed arguments that were passed into the command. - Similar to :attr:`args`\, if this is accessed in the - :func:`.on_command_error` event then this dict could be incomplete. - current_parameter: Optional[:class:`Parameter`] - The parameter that is currently being inspected and converted. - This is only of use for within converters. - - .. versionadded:: 2.0 - current_argument: Optional[:class:`str`] - The argument string of the :attr:`current_parameter` that is currently being converted. - This is only of use for within converters. - - .. versionadded:: 2.0 - interaction: Optional[:class:`~discord.Interaction`] - The interaction associated with this context. - - .. versionadded:: 2.0 - prefix: Optional[:class:`str`] - The prefix that was used to invoke the command. For interaction based contexts, - this is ``/`` for slash commands and ``\u200b`` for context menu commands. - command: Optional[:class:`Command`] - The command that is being invoked currently. - invoked_with: Optional[:class:`str`] - The command name that triggered this invocation. Useful for finding out - which alias called the command. - invoked_parents: List[:class:`str`] - The command names of the parents that triggered this invocation. Useful for - finding out which aliases called the command. - - For example in commands ``?a b c test``, the invoked parents are ``['a', 'b', 'c']``. - - .. versionadded:: 1.7 - - invoked_subcommand: Optional[:class:`Command`] - The subcommand that was invoked. - If no valid subcommand was invoked then this is equal to ``None``. - subcommand_passed: Optional[:class:`str`] - The string that was attempted to call a subcommand. This does not have - to point to a valid registered subcommand and could just point to a - nonsense string. If nothing was passed to attempt a call to a - subcommand then this is set to ``None``. - command_failed: :class:`bool` - A boolean that indicates if the command failed to be parsed, checked, - or invoked. 
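A short sketch showing a few of the attributes listed above in use, assuming a ``commands.Bot`` instance named ``bot`` as in the earlier prefix sketch:

.. code-block:: python3

    from discord.ext import commands

    # assumes `bot` is a commands.Bot as constructed earlier
    @bot.command()
    async def whoami(ctx: commands.Context):
        await ctx.send(
            f'{ctx.author} invoked {ctx.invoked_with!r} '
            f'with prefix {ctx.prefix!r} in {ctx.channel}'
        )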
- """ - - def __init__( - self, - *, - message: Message, - bot: BotT, - view: StringView, - args: List[Any] = MISSING, - kwargs: Dict[str, Any] = MISSING, - prefix: Optional[str] = None, - command: Optional[Command[Any, ..., Any]] = None, - invoked_with: Optional[str] = None, - invoked_parents: List[str] = MISSING, - invoked_subcommand: Optional[Command[Any, ..., Any]] = None, - subcommand_passed: Optional[str] = None, - command_failed: bool = False, - current_parameter: Optional[Parameter] = None, - current_argument: Optional[str] = None, - interaction: Optional[Interaction[BotT]] = None, - ): - self.message: Message = message - self.bot: BotT = bot - self.args: List[Any] = args or [] - self.kwargs: Dict[str, Any] = kwargs or {} - self.prefix: Optional[str] = prefix - self.command: Optional[Command[Any, ..., Any]] = command - self.view: StringView = view - self.invoked_with: Optional[str] = invoked_with - self.invoked_parents: List[str] = invoked_parents or [] - self.invoked_subcommand: Optional[Command[Any, ..., Any]] = invoked_subcommand - self.subcommand_passed: Optional[str] = subcommand_passed - self.command_failed: bool = command_failed - self.current_parameter: Optional[Parameter] = current_parameter - self.current_argument: Optional[str] = current_argument - self.interaction: Optional[Interaction[BotT]] = interaction - self._state: ConnectionState = self.message._state - - @classmethod - async def from_interaction(cls, interaction: Interaction[BotT], /) -> Self: - """|coro| - - Creates a context from a :class:`discord.Interaction`. This only - works on application command based interactions, such as slash commands - or context menus. - - On slash command based interactions this creates a synthetic :class:`~discord.Message` - that points to an ephemeral message that the command invoker has executed. This means - that :attr:`Context.author` returns the member that invoked the command. - - In a message context menu based interaction, the :attr:`Context.message` attribute - is the message that the command is being executed on. This means that :attr:`Context.author` - returns the author of the message being targetted. To get the member that invoked - the command then :attr:`discord.Interaction.user` should be used instead. - - .. versionadded:: 2.0 - - Parameters - ----------- - interaction: :class:`discord.Interaction` - The interaction to create a context with. - - Raises - ------- - ValueError - The interaction does not have a valid command. - TypeError - The interaction client is not derived from :class:`Bot` or :class:`AutoShardedBot`. 
- """ - - # Circular import - from .bot import BotBase - - if not isinstance(interaction.client, BotBase): - raise TypeError('Interaction client is not derived from commands.Bot or commands.AutoShardedBot') - - command = interaction.command - if command is None: - raise ValueError('interaction does not have command data') - - bot: BotT = interaction.client # type: ignore - data: ApplicationCommandInteractionData = interaction.data # type: ignore - if interaction.message is None: - synthetic_payload = { - 'id': interaction.id, - 'reactions': [], - 'embeds': [], - 'mention_everyone': False, - 'tts': False, - 'pinned': False, - 'edited_timestamp': None, - 'type': MessageType.chat_input_command if data.get('type', 1) == 1 else MessageType.context_menu_command, - 'flags': 64, - 'content': '', - 'mentions': [], - 'mention_roles': [], - 'attachments': [], - } - - if interaction.channel_id is None: - raise RuntimeError('interaction channel ID is null, this is probably a Discord bug') - - channel = interaction.channel or PartialMessageable( - state=interaction._state, guild_id=interaction.guild_id, id=interaction.channel_id - ) - message = Message(state=interaction._state, channel=channel, data=synthetic_payload) # type: ignore - message.author = interaction.user - message.attachments = [a for _, a in interaction.namespace if isinstance(a, Attachment)] - else: - message = interaction.message - - prefix = '/' if data.get('type', 1) == 1 else '\u200b' # Mock the prefix - ctx = cls( - message=message, - bot=bot, - view=StringView(''), - args=[], - kwargs={}, - prefix=prefix, - interaction=interaction, - invoked_with=command.name, - command=command, # type: ignore # this will be a hybrid command, technically - ) - interaction._baton = ctx - ctx.command_failed = interaction.command_failed - return ctx - - async def invoke(self, command: Command[CogT, P, T], /, *args: P.args, **kwargs: P.kwargs) -> T: - r"""|coro| - - Calls a command with the arguments given. - - This is useful if you want to just call the callback that a - :class:`.Command` holds internally. - - .. note:: - - This does not handle converters, checks, cooldowns, pre-invoke, - or after-invoke hooks in any matter. It calls the internal callback - directly as-if it was a regular function. - - You must take care in passing the proper arguments when - using this function. - - .. versionchanged:: 2.0 - - ``command`` parameter is now positional-only. - - Parameters - ----------- - command: :class:`.Command` - The command that is going to be called. - \*args - The arguments to use. - \*\*kwargs - The keyword arguments to use. - - Raises - ------- - TypeError - The command argument to invoke is missing. - """ - return await command(self, *args, **kwargs) - - async def reinvoke(self, *, call_hooks: bool = False, restart: bool = True) -> None: - """|coro| - - Calls the command again. - - This is similar to :meth:`~.Context.invoke` except that it bypasses - checks, cooldowns, and error handlers. - - .. note:: - - If you want to bypass :exc:`.UserInputError` derived exceptions, - it is recommended to use the regular :meth:`~.Context.invoke` - as it will work more naturally. After all, this will end up - using the old arguments the user has used and will thus just - fail again. - - Parameters - ------------ - call_hooks: :class:`bool` - Whether to call the before and after invoke hooks. - restart: :class:`bool` - Whether to start the call chain from the very beginning - or where we left off (i.e. the command that caused the error). 
- The default is to start where we left off. - - Raises - ------- - ValueError - The context to reinvoke is not valid. - """ - cmd = self.command - view = self.view - if cmd is None: - raise ValueError('This context is not valid.') - - # some state to revert to when we're done - index, previous = view.index, view.previous - invoked_with = self.invoked_with - invoked_subcommand = self.invoked_subcommand - invoked_parents = self.invoked_parents - subcommand_passed = self.subcommand_passed - - if restart: - to_call = cmd.root_parent or cmd - view.index = len(self.prefix or '') - view.previous = 0 - self.invoked_parents = [] - self.invoked_with = view.get_word() # advance to get the root command - else: - to_call = cmd - - try: - await to_call.reinvoke(self, call_hooks=call_hooks) - finally: - self.command = cmd - view.index = index - view.previous = previous - self.invoked_with = invoked_with - self.invoked_subcommand = invoked_subcommand - self.invoked_parents = invoked_parents - self.subcommand_passed = subcommand_passed - - @property - def valid(self) -> bool: - """:class:`bool`: Checks if the invocation context is valid to be invoked with.""" - return self.prefix is not None and self.command is not None - - async def _get_channel(self) -> discord.abc.Messageable: - return self.channel - - @property - def clean_prefix(self) -> str: - """:class:`str`: The cleaned up invoke prefix. i.e. mentions are ``@name`` instead of ``<@id>``. - - .. versionadded:: 2.0 - """ - if self.prefix is None: - return '' - - user = self.me - # this breaks if the prefix mention is not the bot itself but I - # consider this to be an *incredibly* strange use case. I'd rather go - # for this common use case rather than waste performance for the - # odd one. - pattern = re.compile(r"<@!?%s>" % user.id) - return pattern.sub("@%s" % user.display_name.replace('\\', r'\\'), self.prefix) - - @property - def cog(self) -> Optional[Cog]: - """Optional[:class:`.Cog`]: Returns the cog associated with this context's command. None if it does not exist.""" - - if self.command is None: - return None - return self.command.cog - - @property - def filesize_limit(self) -> int: - """:class:`int`: Returns the maximum number of bytes files can have when uploaded to this guild or DM channel associated with this context. - - .. versionadded:: 2.3 - """ - return self.guild.filesize_limit if self.guild is not None else discord.utils.DEFAULT_FILE_SIZE_LIMIT_BYTES - - @discord.utils.cached_property - def guild(self) -> Optional[Guild]: - """Optional[:class:`.Guild`]: Returns the guild associated with this context's command. None if not available.""" - return self.message.guild - - @discord.utils.cached_property - def channel(self) -> MessageableChannel: - """Union[:class:`.abc.Messageable`]: Returns the channel associated with this context's command. - Shorthand for :attr:`.Message.channel`. - """ - return self.message.channel - - @discord.utils.cached_property - def author(self) -> Union[User, Member]: - """Union[:class:`~discord.User`, :class:`.Member`]: - Returns the author associated with this context's command. Shorthand for :attr:`.Message.author` - """ - return self.message.author - - @discord.utils.cached_property - def me(self) -> Union[Member, ClientUser]: - """Union[:class:`.Member`, :class:`.ClientUser`]: - Similar to :attr:`.Guild.me` except it may return the :class:`.ClientUser` in private message contexts. - """ - # bot.user will never be None at this point. 
- return self.guild.me if self.guild is not None else self.bot.user # type: ignore - - @discord.utils.cached_property - def permissions(self) -> Permissions: - """:class:`.Permissions`: Returns the resolved permissions for the invoking user in this channel. - Shorthand for :meth:`.abc.GuildChannel.permissions_for` or :attr:`.Interaction.permissions`. - - .. versionadded:: 2.0 - """ - if self.channel.type is ChannelType.private: - return Permissions._dm_permissions() - if not self.interaction: - # channel and author will always match relevant types here - return self.channel.permissions_for(self.author) # type: ignore - base = self.interaction.permissions - if self.channel.type in (ChannelType.voice, ChannelType.stage_voice): - if not base.connect: - # voice channels cannot be edited by people who can't connect to them - # It also implicitly denies all other voice perms - denied = Permissions.voice() - denied.update(manage_channels=True, manage_roles=True) - base.value &= ~denied.value - else: - # text channels do not have voice related permissions - denied = Permissions.voice() - base.value &= ~denied.value - return base - - @discord.utils.cached_property - def bot_permissions(self) -> Permissions: - """:class:`.Permissions`: Returns the resolved permissions for the bot in this channel. - Shorthand for :meth:`.abc.GuildChannel.permissions_for` or :attr:`.Interaction.app_permissions`. - - For interaction-based commands, this will reflect the effective permissions - for :class:`Context` calls, which may differ from calls through - other :class:`.abc.Messageable` endpoints, like :attr:`channel`. - - Notably, sending messages, embedding links, and attaching files are always - permitted, while reading messages might not be. - - .. versionadded:: 2.0 - """ - channel = self.channel - if channel.type == ChannelType.private: - return Permissions._dm_permissions() - if not self.interaction: - # channel and me will always match relevant types here - return channel.permissions_for(self.me) # type: ignore - guild = channel.guild - base = self.interaction.app_permissions - if self.channel.type in (ChannelType.voice, ChannelType.stage_voice): - if not base.connect: - # voice channels cannot be edited by people who can't connect to them - # It also implicitly denies all other voice perms - denied = Permissions.voice() - denied.update(manage_channels=True, manage_roles=True) - base.value &= ~denied.value - else: - # text channels do not have voice related permissions - denied = Permissions.voice() - base.value &= ~denied.value - base.update( - embed_links=True, - attach_files=True, - send_tts_messages=False, - ) - if isinstance(channel, Thread): - base.send_messages_in_threads = True - else: - base.send_messages = True - return base - - @property - def voice_client(self) -> Optional[VoiceProtocol]: - r"""Optional[:class:`.VoiceProtocol`]: A shortcut to :attr:`.Guild.voice_client`\, if applicable.""" - g = self.guild - return g.voice_client if g else None - - async def send_help(self, *args: Any) -> Any: - """send_help(entity=) - - |coro| - - Shows the help command for the specified entity if given. - The entity can be a command or a cog. - - If no entity is given, then it'll show help for the - entire bot. - - If the entity is a string, then it looks up whether it's a - :class:`Cog` or a :class:`Command`. - - .. note:: - - Due to the way this function works, instead of returning - something similar to :meth:`~.commands.HelpCommand.command_not_found` - this returns ``None`` on bad input or no help command. 
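A sketch of ``send_help`` in use; as noted above it returns ``None`` on bad input rather than raising, and the ``helpme`` command name is hypothetical.

.. code-block:: python3

    from discord.ext import commands

    # assumes `bot` is a commands.Bot as constructed earlier
    @bot.command()
    async def helpme(ctx: commands.Context, *, entity: str = ''):
        if entity:
            result = await ctx.send_help(entity)
            if result is None:
                await ctx.send(f'No help found for {entity!r}.')
        else:
            await ctx.send_help()  # help for the whole bot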
- - Parameters - ------------ - entity: Optional[Union[:class:`Command`, :class:`Cog`, :class:`str`]] - The entity to show help for. - - Returns - -------- - Any - The result of the help command, if any. - """ - from .core import Command, Group, wrap_callback - from .errors import CommandError - - bot = self.bot - cmd = bot.help_command - - if cmd is None: - return None - - cmd = cmd.copy() - cmd.context = self - - if len(args) == 0: - await cmd.prepare_help_command(self, None) - mapping = cmd.get_bot_mapping() - injected = wrap_callback(cmd.send_bot_help) - try: - return await injected(mapping) - except CommandError as e: - await cmd.on_help_command_error(self, e) - return None - - entity = args[0] - if isinstance(entity, str): - entity = bot.get_cog(entity) or bot.get_command(entity) - - if entity is None: - return None - - try: - entity.qualified_name - except AttributeError: - # if we're here then it's not a cog, group, or command. - return None - - await cmd.prepare_help_command(self, entity.qualified_name) - - try: - if is_cog(entity): - injected = wrap_callback(cmd.send_cog_help) - return await injected(entity) - elif isinstance(entity, Group): - injected = wrap_callback(cmd.send_group_help) - return await injected(entity) - elif isinstance(entity, Command): - injected = wrap_callback(cmd.send_command_help) - return await injected(entity) - else: - return None - except CommandError as e: - await cmd.on_help_command_error(self, e) - - @overload - async def reply( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embed: Embed = ..., - file: File = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def reply( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embed: Embed = ..., - files: Sequence[File] = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def reply( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embeds: Sequence[Embed] = ..., - file: File = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... 
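The overloads above all funnel into ``reply``, documented just below; a minimal sketch of its typical use:

.. code-block:: python3

    from discord.ext import commands

    # assumes `bot` is a commands.Bot as constructed earlier
    @bot.command()
    async def latency(ctx: commands.Context):
        # replies to the invoking message; for interaction contexts this is a plain send
        await ctx.reply(f'{bot.latency * 1000:.0f} ms', mention_author=False)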
- - @overload - async def reply( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embeds: Sequence[Embed] = ..., - files: Sequence[File] = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - async def reply(self, content: Optional[str] = None, **kwargs: Any) -> Message: - """|coro| - - A shortcut method to :meth:`send` to reply to the - :class:`~discord.Message` referenced by this context. - - For interaction based contexts, this is the same as :meth:`send`. - - .. versionadded:: 1.6 - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` or - :exc:`ValueError` instead of ``InvalidArgument``. - - Raises - -------- - ~discord.HTTPException - Sending the message failed. - ~discord.Forbidden - You do not have the proper permissions to send the message. - ValueError - The ``files`` list is not of the appropriate size - TypeError - You specified both ``file`` and ``files``. - - Returns - --------- - :class:`~discord.Message` - The message that was sent. - """ - if self.interaction is None: - return await self.send(content, reference=self.message, **kwargs) - else: - return await self.send(content, **kwargs) - - def typing(self, *, ephemeral: bool = False) -> Union[Typing, DeferTyping]: - """Returns an asynchronous context manager that allows you to send a typing indicator to - the destination for an indefinite period of time, or 10 seconds if the context manager - is called using ``await``. - - In an interaction based context, this is equivalent to a :meth:`defer` call and - does not do any typing calls. - - Example Usage: :: - - async with channel.typing(): - # simulate something heavy - await asyncio.sleep(20) - - await channel.send('Done!') - - Example Usage: :: - - await channel.typing() - # Do some computational magic for about 10 seconds - await channel.send('Done!') - - .. versionchanged:: 2.0 - This no longer works with the ``with`` syntax, ``async with`` must be used instead. - - .. versionchanged:: 2.0 - Added functionality to ``await`` the context manager to send a typing indicator for 10 seconds. - - Parameters - ----------- - ephemeral: :class:`bool` - Indicates whether the deferred message will eventually be ephemeral. - Only valid for interaction based contexts. - - .. versionadded:: 2.0 - """ - if self.interaction is None: - return Typing(self) - return DeferTyping(self, ephemeral=ephemeral) - - async def defer(self, *, ephemeral: bool = False) -> None: - """|coro| - - Defers the interaction based contexts. - - This is typically used when the interaction is acknowledged - and a secondary action will be done later. - - If this isn't an interaction based context then it does nothing. - - Parameters - ----------- - ephemeral: :class:`bool` - Indicates whether the deferred message will eventually be ephemeral. - - Raises - ------- - HTTPException - Deferring the interaction failed. - InteractionResponded - This interaction has already been responded to before. 
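A short sketch of how ``reply``, ``typing`` and ``defer`` above combine in a command that does slow work; the command body is an illustrative assumption rather than library code: ::

    import asyncio

    from discord.ext import commands

    @commands.command()
    async def crunch(ctx: commands.Context):
        # For interaction-based invocations typing() degrades to a defer() call,
        # so the same code path serves both prefix and slash invocations.
        async with ctx.typing():
            await asyncio.sleep(15)  # stand-in for the slow work
        # reply() references ctx.message for prefix commands and falls back
        # to a plain send() when the context came from an interaction.
        await ctx.reply('Done!')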
- """ - - if self.interaction: - await self.interaction.response.defer(ephemeral=ephemeral) - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embed: Embed = ..., - file: File = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embed: Embed = ..., - files: Sequence[File] = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embeds: Sequence[Embed] = ..., - file: File = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - @overload - async def send( - self, - content: Optional[str] = ..., - *, - tts: bool = ..., - embeds: Sequence[Embed] = ..., - files: Sequence[File] = ..., - stickers: Sequence[Union[GuildSticker, StickerItem]] = ..., - delete_after: float = ..., - nonce: Union[str, int] = ..., - allowed_mentions: AllowedMentions = ..., - reference: Union[Message, MessageReference, PartialMessage] = ..., - mention_author: bool = ..., - view: View = ..., - suppress_embeds: bool = ..., - ephemeral: bool = ..., - silent: bool = ..., - ) -> Message: - ... - - async def send( - self, - content: Optional[str] = None, - *, - tts: bool = False, - embed: Optional[Embed] = None, - embeds: Optional[Sequence[Embed]] = None, - file: Optional[File] = None, - files: Optional[Sequence[File]] = None, - stickers: Optional[Sequence[Union[GuildSticker, StickerItem]]] = None, - delete_after: Optional[float] = None, - nonce: Optional[Union[str, int]] = None, - allowed_mentions: Optional[AllowedMentions] = None, - reference: Optional[Union[Message, MessageReference, PartialMessage]] = None, - mention_author: Optional[bool] = None, - view: Optional[View] = None, - suppress_embeds: bool = False, - ephemeral: bool = False, - silent: bool = False, - ) -> Message: - """|coro| - - Sends a message to the destination with the content given. - - This works similarly to :meth:`~discord.abc.Messageable.send` for non-interaction contexts. - - For interaction based contexts this does one of the following: - - - :meth:`discord.InteractionResponse.send_message` if no response has been given. - - A followup message if a response has been given. - - Regular send if the interaction has expired - - .. versionchanged:: 2.0 - This function will now raise :exc:`TypeError` or - :exc:`ValueError` instead of ``InvalidArgument``. 
- - Parameters - ------------ - content: Optional[:class:`str`] - The content of the message to send. - tts: :class:`bool` - Indicates if the message should be sent using text-to-speech. - embed: :class:`~discord.Embed` - The rich embed for the content. - file: :class:`~discord.File` - The file to upload. - files: List[:class:`~discord.File`] - A list of files to upload. Must be a maximum of 10. - nonce: :class:`int` - The nonce to use for sending this message. If the message was successfully sent, - then the message will have a nonce with this value. - delete_after: :class:`float` - If provided, the number of seconds to wait in the background - before deleting the message we just sent. If the deletion fails, - then it is silently ignored. - allowed_mentions: :class:`~discord.AllowedMentions` - Controls the mentions being processed in this message. If this is - passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`. - The merging behaviour only overrides attributes that have been explicitly passed - to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`. - If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions` - are used instead. - - .. versionadded:: 1.4 - - reference: Union[:class:`~discord.Message`, :class:`~discord.MessageReference`, :class:`~discord.PartialMessage`] - A reference to the :class:`~discord.Message` to which you are replying, this can be created using - :meth:`~discord.Message.to_reference` or passed directly as a :class:`~discord.Message`. You can control - whether this mentions the author of the referenced message using the :attr:`~discord.AllowedMentions.replied_user` - attribute of ``allowed_mentions`` or by setting ``mention_author``. - - This is ignored for interaction based contexts. - - .. versionadded:: 1.6 - - mention_author: Optional[:class:`bool`] - If set, overrides the :attr:`~discord.AllowedMentions.replied_user` attribute of ``allowed_mentions``. - This is ignored for interaction based contexts. - - .. versionadded:: 1.6 - view: :class:`discord.ui.View` - A Discord UI View to add to the message. - - .. versionadded:: 2.0 - embeds: List[:class:`~discord.Embed`] - A list of embeds to upload. Must be a maximum of 10. - - .. versionadded:: 2.0 - stickers: Sequence[Union[:class:`~discord.GuildSticker`, :class:`~discord.StickerItem`]] - A list of stickers to upload. Must be a maximum of 3. This is ignored for interaction based contexts. - - .. versionadded:: 2.0 - suppress_embeds: :class:`bool` - Whether to suppress embeds for the message. This sends the message without any embeds if set to ``True``. - - .. versionadded:: 2.0 - ephemeral: :class:`bool` - Indicates if the message should only be visible to the user who started the interaction. - If a view is sent with an ephemeral message and it has no timeout set then the timeout - is set to 15 minutes. **This is only applicable in contexts with an interaction**. - - .. versionadded:: 2.0 - silent: :class:`bool` - Whether to suppress push and desktop notifications for the message. This will increment the mention counter - in the UI, but will not actually send a notification. - - .. versionadded:: 2.2 - - Raises - -------- - ~discord.HTTPException - Sending the message failed. - ~discord.Forbidden - You do not have the proper permissions to send the message. - ValueError - The ``files`` list is not of the appropriate size. 
- TypeError - You specified both ``file`` and ``files``, - or you specified both ``embed`` and ``embeds``, - or the ``reference`` object is not a :class:`~discord.Message`, - :class:`~discord.MessageReference` or :class:`~discord.PartialMessage`. - - Returns - --------- - :class:`~discord.Message` - The message that was sent. - """ - - if self.interaction is None or self.interaction.is_expired(): - return await super().send( - content=content, - tts=tts, - embed=embed, - embeds=embeds, - file=file, - files=files, - stickers=stickers, - delete_after=delete_after, - nonce=nonce, - allowed_mentions=allowed_mentions, - reference=reference, - mention_author=mention_author, - view=view, - suppress_embeds=suppress_embeds, - silent=silent, - ) # type: ignore # The overloads don't support Optional but the implementation does - - # Convert the kwargs from None to MISSING to appease the remaining implementations - kwargs = { - 'content': content, - 'tts': tts, - 'embed': MISSING if embed is None else embed, - 'embeds': MISSING if embeds is None else embeds, - 'file': MISSING if file is None else file, - 'files': MISSING if files is None else files, - 'allowed_mentions': MISSING if allowed_mentions is None else allowed_mentions, - 'view': MISSING if view is None else view, - 'suppress_embeds': suppress_embeds, - 'ephemeral': ephemeral, - 'silent': silent, - } - - if self.interaction.response.is_done(): - msg = await self.interaction.followup.send(**kwargs, wait=True) - else: - await self.interaction.response.send_message(**kwargs) - msg = await self.interaction.original_response() - - if delete_after is not None: - await msg.delete(delay=delete_after) - return msg diff --git a/.venv/Lib/site-packages/discord/ext/commands/converter.py b/.venv/Lib/site-packages/discord/ext/commands/converter.py deleted file mode 100644 index 7255f17..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/converter.py +++ /dev/null @@ -1,1355 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
-""" - -from __future__ import annotations - -import inspect -import re -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generic, - Iterable, - List, - Literal, - Optional, - overload, - Protocol, - Tuple, - Type, - TypeVar, - Union, - runtime_checkable, -) -import types - -import discord - -from .errors import * - -if TYPE_CHECKING: - from discord.state import Channel - from discord.threads import Thread - - from .parameters import Parameter - from ._types import BotT, _Bot - from .context import Context - -__all__ = ( - 'Converter', - 'ObjectConverter', - 'MemberConverter', - 'UserConverter', - 'MessageConverter', - 'PartialMessageConverter', - 'TextChannelConverter', - 'InviteConverter', - 'GuildConverter', - 'RoleConverter', - 'GameConverter', - 'ColourConverter', - 'ColorConverter', - 'VoiceChannelConverter', - 'StageChannelConverter', - 'EmojiConverter', - 'PartialEmojiConverter', - 'CategoryChannelConverter', - 'ForumChannelConverter', - 'IDConverter', - 'ThreadConverter', - 'GuildChannelConverter', - 'GuildStickerConverter', - 'ScheduledEventConverter', - 'clean_content', - 'Greedy', - 'Range', - 'run_converters', -) - - -def _get_from_guilds(bot: _Bot, getter: str, argument: Any) -> Any: - result = None - for guild in bot.guilds: - result = getattr(guild, getter)(argument) - if result: - return result - return result - - -_utils_get = discord.utils.get -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -CT = TypeVar('CT', bound=discord.abc.GuildChannel) -TT = TypeVar('TT', bound=discord.Thread) - - -@runtime_checkable -class Converter(Protocol[T_co]): - """The base class of custom converters that require the :class:`.Context` - to be passed to be useful. - - This allows you to implement converters that function similar to the - special cased ``discord`` classes. - - Classes that derive from this should override the :meth:`~.Converter.convert` - method to do its conversion logic. This method must be a :ref:`coroutine `. - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> T_co: - """|coro| - - The method to override to do conversion logic. - - If an error is found while converting, it is recommended to - raise a :exc:`.CommandError` derived exception as it will - properly propagate to the error handlers. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context that the argument is being used in. - argument: :class:`str` - The argument that is being converted. - - Raises - ------- - CommandError - A generic exception occurred when converting the argument. - BadArgument - The converter failed to convert the argument. - """ - raise NotImplementedError('Derived classes need to implement this.') - - -_ID_REGEX = re.compile(r'([0-9]{15,20})$') - - -class IDConverter(Converter[T_co]): - @staticmethod - def _get_id_match(argument): - return _ID_REGEX.match(argument) - - -class ObjectConverter(IDConverter[discord.Object]): - """Converts to a :class:`~discord.Object`. - - The argument must follow the valid ID or mention formats (e.g. ``<@80088516616269824>``). - - .. versionadded:: 2.0 - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by member, role, or channel mention. 
- """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Object: - match = self._get_id_match(argument) or re.match(r'<(?:@(?:!|&)?|#)([0-9]{15,20})>$', argument) - - if match is None: - raise ObjectNotFound(argument) - - result = int(match.group(1)) - - return discord.Object(id=result) - - -class MemberConverter(IDConverter[discord.Member]): - """Converts to a :class:`~discord.Member`. - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by username#discriminator (deprecated). - 4. Lookup by username#0 (deprecated, only gets users that migrated from their discriminator). - 5. Lookup by user name. - 6. Lookup by global name. - 7. Lookup by guild nickname. - - .. versionchanged:: 1.5 - Raise :exc:`.MemberNotFound` instead of generic :exc:`.BadArgument` - - .. versionchanged:: 1.5.1 - This converter now lazily fetches members from the gateway and HTTP APIs, - optionally caching the result if :attr:`.MemberCacheFlags.joined` is enabled. - - .. deprecated:: 2.3 - Looking up users by discriminator will be removed in a future version due to - the removal of discriminators in an API change. - """ - - async def query_member_named(self, guild: discord.Guild, argument: str) -> Optional[discord.Member]: - cache = guild._state.member_cache_flags.joined - username, _, discriminator = argument.rpartition('#') - - # If # isn't found then "discriminator" actually has the username - if not username: - discriminator, username = username, discriminator - - if discriminator == '0' or (len(discriminator) == 4 and discriminator.isdigit()): - lookup = username - predicate = lambda m: m.name == username and m.discriminator == discriminator - else: - lookup = argument - predicate = lambda m: m.name == argument or m.global_name == argument or m.nick == argument - - members = await guild.query_members(lookup, limit=100, cache=cache) - return discord.utils.find(predicate, members) - - async def query_member_by_id(self, bot: _Bot, guild: discord.Guild, user_id: int) -> Optional[discord.Member]: - ws = bot._get_websocket(shard_id=guild.shard_id) - cache = guild._state.member_cache_flags.joined - if ws.is_ratelimited(): - # If we're being rate limited on the WS, then fall back to using the HTTP API - # So we don't have to wait ~60 seconds for the query to finish - try: - member = await guild.fetch_member(user_id) - except discord.HTTPException: - return None - - if cache: - guild._add_member(member) - return member - - # If we're not being rate limited then we can use the websocket to actually query - members = await guild.query_members(limit=1, user_ids=[user_id], cache=cache) - if not members: - return None - return members[0] - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Member: - bot = ctx.bot - match = self._get_id_match(argument) or re.match(r'<@!?([0-9]{15,20})>$', argument) - guild = ctx.guild - result = None - user_id = None - - if match is None: - # not a mention... 
- if guild: - result = guild.get_member_named(argument) - else: - result = _get_from_guilds(bot, 'get_member_named', argument) - else: - user_id = int(match.group(1)) - if guild: - result = guild.get_member(user_id) or _utils_get(ctx.message.mentions, id=user_id) - else: - result = _get_from_guilds(bot, 'get_member', user_id) - - if not isinstance(result, discord.Member): - if guild is None: - raise MemberNotFound(argument) - - if user_id is not None: - result = await self.query_member_by_id(bot, guild, user_id) - else: - result = await self.query_member_named(guild, argument) - - if not result: - raise MemberNotFound(argument) - - return result - - -class UserConverter(IDConverter[discord.User]): - """Converts to a :class:`~discord.User`. - - All lookups are via the global user cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by username#discriminator (deprecated). - 4. Lookup by username#0 (deprecated, only gets users that migrated from their discriminator). - 5. Lookup by user name. - 6. Lookup by global name. - - .. versionchanged:: 1.5 - Raise :exc:`.UserNotFound` instead of generic :exc:`.BadArgument` - - .. versionchanged:: 1.6 - This converter now lazily fetches users from the HTTP APIs if an ID is passed - and it's not available in cache. - - .. deprecated:: 2.3 - Looking up users by discriminator will be removed in a future version due to - the removal of discriminators in an API change. - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.User: - match = self._get_id_match(argument) or re.match(r'<@!?([0-9]{15,20})>$', argument) - result = None - state = ctx._state - - if match is not None: - user_id = int(match.group(1)) - result = ctx.bot.get_user(user_id) or _utils_get(ctx.message.mentions, id=user_id) - if result is None: - try: - result = await ctx.bot.fetch_user(user_id) - except discord.HTTPException: - raise UserNotFound(argument) from None - - return result # type: ignore - - username, _, discriminator = argument.rpartition('#') - - # If # isn't found then "discriminator" actually has the username - if not username: - discriminator, username = username, discriminator - - if discriminator == '0' or (len(discriminator) == 4 and discriminator.isdigit()): - predicate = lambda u: u.name == username and u.discriminator == discriminator - else: - predicate = lambda u: u.name == argument or u.global_name == argument - - result = discord.utils.find(predicate, state._users.values()) - if result is None: - raise UserNotFound(argument) - - return result - - -class PartialMessageConverter(Converter[discord.PartialMessage]): - """Converts to a :class:`discord.PartialMessage`. - - .. versionadded:: 1.7 - - The creation strategy is as follows (in order): - - 1. By "{channel ID}-{message ID}" (retrieved by shift-clicking on "Copy ID") - 2. By message ID (The message is assumed to be in the context channel.) - 3. 
By message URL - """ - - @staticmethod - def _get_id_matches(ctx: Context[BotT], argument: str) -> Tuple[Optional[int], int, int]: - id_regex = re.compile(r'(?:(?P<channel_id>[0-9]{15,20})-)?(?P<message_id>[0-9]{15,20})$') - link_regex = re.compile( - r'https?://(?:(ptb|canary|www)\.)?discord(?:app)?\.com/channels/' - r'(?P<guild_id>[0-9]{15,20}|@me)' - r'/(?P<channel_id>[0-9]{15,20})/(?P<message_id>[0-9]{15,20})/?$' - ) - match = id_regex.match(argument) or link_regex.match(argument) - if not match: - raise MessageNotFound(argument) - data = match.groupdict() - channel_id = discord.utils._get_as_snowflake(data, 'channel_id') or ctx.channel.id - message_id = int(data['message_id']) - guild_id = data.get('guild_id') - if guild_id is None: - guild_id = ctx.guild and ctx.guild.id - elif guild_id == '@me': - guild_id = None - else: - guild_id = int(guild_id) - return guild_id, message_id, channel_id - - @staticmethod - def _resolve_channel( - ctx: Context[BotT], guild_id: Optional[int], channel_id: Optional[int] - ) -> Optional[Union[Channel, Thread]]: - if channel_id is None: - # we were passed just a message id so we can assume the channel is the current context channel - return ctx.channel - - if guild_id is not None: - guild = ctx.bot.get_guild(guild_id) - if guild is None: - return None - return guild._resolve_channel(channel_id) - - return ctx.bot.get_channel(channel_id) - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.PartialMessage: - guild_id, message_id, channel_id = self._get_id_matches(ctx, argument) - channel = self._resolve_channel(ctx, guild_id, channel_id) - if not channel or not isinstance(channel, discord.abc.Messageable): - raise ChannelNotFound(channel_id) - return discord.PartialMessage(channel=channel, id=message_id) - - -class MessageConverter(IDConverter[discord.Message]): - """Converts to a :class:`discord.Message`. - - .. versionadded:: 1.1 - - The lookup strategy is as follows (in order): - - 1. Lookup by "{channel ID}-{message ID}" (retrieved by shift-clicking on "Copy ID") - 2. Lookup by message ID (the message **must** be in the context channel) - 3. Lookup by message URL - - .. versionchanged:: 1.5 - Raise :exc:`.ChannelNotFound`, :exc:`.MessageNotFound` or :exc:`.ChannelNotReadable` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Message: - guild_id, message_id, channel_id = PartialMessageConverter._get_id_matches(ctx, argument) - message = ctx.bot._connection._get_message(message_id) - if message: - return message - channel = PartialMessageConverter._resolve_channel(ctx, guild_id, channel_id) - if not channel or not isinstance(channel, discord.abc.Messageable): - raise ChannelNotFound(channel_id) - try: - return await channel.fetch_message(message_id) - except discord.NotFound: - raise MessageNotFound(argument) - except discord.Forbidden: - raise ChannelNotReadable(channel) # type: ignore # type-checker thinks channel could be a DMChannel at this point - - -class GuildChannelConverter(IDConverter[discord.abc.GuildChannel]): - """Converts to a :class:`~discord.abc.GuildChannel`. - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name. - - ..
versionadded:: 2.0 - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.abc.GuildChannel: - return self._resolve_channel(ctx, argument, 'channels', discord.abc.GuildChannel) - - @staticmethod - def _resolve_channel(ctx: Context[BotT], argument: str, attribute: str, type: Type[CT]) -> CT: - bot = ctx.bot - - match = IDConverter._get_id_match(argument) or re.match(r'<#([0-9]{15,20})>$', argument) - result = None - guild = ctx.guild - - if match is None: - # not a mention - if guild: - iterable: Iterable[CT] = getattr(guild, attribute) - result: Optional[CT] = discord.utils.get(iterable, name=argument) - else: - - def check(c): - return isinstance(c, type) and c.name == argument - - result = discord.utils.find(check, bot.get_all_channels()) # type: ignore - else: - channel_id = int(match.group(1)) - if guild: - # guild.get_channel returns an explicit union instead of the base class - result = guild.get_channel(channel_id) # type: ignore - else: - result = _get_from_guilds(bot, 'get_channel', channel_id) - - if not isinstance(result, type): - raise ChannelNotFound(argument) - - return result - - @staticmethod - def _resolve_thread(ctx: Context[BotT], argument: str, attribute: str, type: Type[TT]) -> TT: - match = IDConverter._get_id_match(argument) or re.match(r'<#([0-9]{15,20})>$', argument) - result = None - guild = ctx.guild - - if match is None: - # not a mention - if guild: - iterable: Iterable[TT] = getattr(guild, attribute) - result: Optional[TT] = discord.utils.get(iterable, name=argument) - else: - thread_id = int(match.group(1)) - if guild: - result = guild.get_thread(thread_id) # type: ignore - - if not result or not isinstance(result, type): - raise ThreadNotFound(argument) - - return result - - -class TextChannelConverter(IDConverter[discord.TextChannel]): - """Converts to a :class:`~discord.TextChannel`. - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name - - .. versionchanged:: 1.5 - Raise :exc:`.ChannelNotFound` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.TextChannel: - return GuildChannelConverter._resolve_channel(ctx, argument, 'text_channels', discord.TextChannel) - - -class VoiceChannelConverter(IDConverter[discord.VoiceChannel]): - """Converts to a :class:`~discord.VoiceChannel`. - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name - - .. versionchanged:: 1.5 - Raise :exc:`.ChannelNotFound` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.VoiceChannel: - return GuildChannelConverter._resolve_channel(ctx, argument, 'voice_channels', discord.VoiceChannel) - - -class StageChannelConverter(IDConverter[discord.StageChannel]): - """Converts to a :class:`~discord.StageChannel`. - - .. versionadded:: 1.7 - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. 
Lookup by name - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.StageChannel: - return GuildChannelConverter._resolve_channel(ctx, argument, 'stage_channels', discord.StageChannel) - - -class CategoryChannelConverter(IDConverter[discord.CategoryChannel]): - """Converts to a :class:`~discord.CategoryChannel`. - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name - - .. versionchanged:: 1.5 - Raise :exc:`.ChannelNotFound` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.CategoryChannel: - return GuildChannelConverter._resolve_channel(ctx, argument, 'categories', discord.CategoryChannel) - - -class ThreadConverter(IDConverter[discord.Thread]): - """Converts to a :class:`~discord.Thread`. - - All lookups are via the local guild. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name. - - .. versionadded: 2.0 - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Thread: - return GuildChannelConverter._resolve_thread(ctx, argument, 'threads', discord.Thread) - - -class ForumChannelConverter(IDConverter[discord.ForumChannel]): - """Converts to a :class:`~discord.ForumChannel`. - - All lookups are via the local guild. If in a DM context, then the lookup - is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name - - .. versionadded:: 2.0 - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.ForumChannel: - return GuildChannelConverter._resolve_channel(ctx, argument, 'forums', discord.ForumChannel) - - -class ColourConverter(Converter[discord.Colour]): - """Converts to a :class:`~discord.Colour`. - - .. versionchanged:: 1.5 - Add an alias named ColorConverter - - The following formats are accepted: - - - ``0x<hex>`` - - ``#<hex>`` - - ``0x#<hex>`` - - ``rgb(<number>, <number>, <number>)`` - - Any of the ``classmethod`` in :class:`~discord.Colour` - - - The ``_`` in the name can be optionally replaced with spaces. - - Like CSS, ``<number>`` can be either 0-255 or 0-100% and ``<hex>`` can be - either a 6 digit hex number or a 3 digit hex shortcut (e.g. #fff). - - .. versionchanged:: 1.5 - Raise :exc:`.BadColourArgument` instead of generic :exc:`.BadArgument` - - .. versionchanged:: 1.7 - Added support for ``rgb`` function and 3-digit hex shortcuts - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Colour: - try: - return discord.Colour.from_str(argument) - except ValueError: - arg = argument.lower().replace(' ', '_') - method = getattr(discord.Colour, arg, None) - if arg.startswith('from_') or method is None or not inspect.ismethod(method): - raise BadColourArgument(arg) - return method() - - -ColorConverter = ColourConverter - - -class RoleConverter(IDConverter[discord.Role]): - """Converts to a :class:`~discord.Role`. - - All lookups are via the local guild. If in a DM context, the converter raises - :exc:`.NoPrivateMessage` exception. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by mention. - 3. Lookup by name - - ..
versionchanged:: 1.5 - Raise :exc:`.RoleNotFound` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Role: - guild = ctx.guild - if not guild: - raise NoPrivateMessage() - - match = self._get_id_match(argument) or re.match(r'<@&([0-9]{15,20})>$', argument) - if match: - result = guild.get_role(int(match.group(1))) - else: - result = discord.utils.get(guild._roles.values(), name=argument) - - if result is None: - raise RoleNotFound(argument) - return result - - -class GameConverter(Converter[discord.Game]): - """Converts to a :class:`~discord.Game`.""" - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Game: - return discord.Game(name=argument) - - -class InviteConverter(Converter[discord.Invite]): - """Converts to a :class:`~discord.Invite`. - - This is done via an HTTP request using :meth:`.Bot.fetch_invite`. - - .. versionchanged:: 1.5 - Raise :exc:`.BadInviteArgument` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Invite: - try: - invite = await ctx.bot.fetch_invite(argument) - return invite - except Exception as exc: - raise BadInviteArgument(argument) from exc - - -class GuildConverter(IDConverter[discord.Guild]): - """Converts to a :class:`~discord.Guild`. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by name. (There is no disambiguation for Guilds with multiple matching names). - - .. versionadded:: 1.7 - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Guild: - match = self._get_id_match(argument) - result = None - - if match is not None: - guild_id = int(match.group(1)) - result = ctx.bot.get_guild(guild_id) - - if result is None: - result = discord.utils.get(ctx.bot.guilds, name=argument) - - if result is None: - raise GuildNotFound(argument) - return result - - -class EmojiConverter(IDConverter[discord.Emoji]): - """Converts to a :class:`~discord.Emoji`. - - All lookups are done for the local guild first, if available. If that lookup - fails, then it checks the client's global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by extracting ID from the emoji. - 3. Lookup by name - - .. versionchanged:: 1.5 - Raise :exc:`.EmojiNotFound` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.Emoji: - match = self._get_id_match(argument) or re.match(r'<a?:[a-zA-Z0-9\_]{1,32}:([0-9]{15,20})>$', argument) - result = None - bot = ctx.bot - guild = ctx.guild - - if match is None: - # Try to get the emoji by name. Try local guild first. - if guild: - result = discord.utils.get(guild.emojis, name=argument) - - if result is None: - result = discord.utils.get(bot.emojis, name=argument) - else: - emoji_id = int(match.group(1)) - - # Try to look up emoji by id. - result = bot.get_emoji(emoji_id) - - if result is None: - raise EmojiNotFound(argument) - - return result - - -class PartialEmojiConverter(Converter[discord.PartialEmoji]): - """Converts to a :class:`~discord.PartialEmoji`. - - This is done by extracting the animated flag, name and ID from the emoji. - - ..
versionchanged:: 1.5 - Raise :exc:`.PartialEmojiConversionFailure` instead of generic :exc:`.BadArgument` - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.PartialEmoji: - match = re.match(r'<(a?):([a-zA-Z0-9\_]{1,32}):([0-9]{15,20})>$', argument) - - if match: - emoji_animated = bool(match.group(1)) - emoji_name = match.group(2) - emoji_id = int(match.group(3)) - - return discord.PartialEmoji.with_state( - ctx.bot._connection, animated=emoji_animated, name=emoji_name, id=emoji_id - ) - - raise PartialEmojiConversionFailure(argument) - - -class GuildStickerConverter(IDConverter[discord.GuildSticker]): - """Converts to a :class:`~discord.GuildSticker`. - - All lookups are done for the local guild first, if available. If that lookup - fails, then it checks the client's global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by name. - - .. versionadded:: 2.0 - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.GuildSticker: - match = self._get_id_match(argument) - result = None - bot = ctx.bot - guild = ctx.guild - - if match is None: - # Try to get the sticker by name. Try local guild first. - if guild: - result = discord.utils.get(guild.stickers, name=argument) - - if result is None: - result = discord.utils.get(bot.stickers, name=argument) - else: - sticker_id = int(match.group(1)) - - # Try to look up sticker by id. - result = bot.get_sticker(sticker_id) - - if result is None: - raise GuildStickerNotFound(argument) - - return result - - -class ScheduledEventConverter(IDConverter[discord.ScheduledEvent]): - """Converts to a :class:`~discord.ScheduledEvent`. - - Lookups are done for the local guild if available. Otherwise, for a DM context, - lookup is done by the global cache. - - The lookup strategy is as follows (in order): - - 1. Lookup by ID. - 2. Lookup by url. - 3. Lookup by name. - - .. versionadded:: 2.0 - """ - - async def convert(self, ctx: Context[BotT], argument: str) -> discord.ScheduledEvent: - guild = ctx.guild - match = self._get_id_match(argument) - result = None - - if match: - # ID match - event_id = int(match.group(1)) - if guild: - result = guild.get_scheduled_event(event_id) - else: - for guild in ctx.bot.guilds: - result = guild.get_scheduled_event(event_id) - if result: - break - else: - pattern = ( - r'https?://(?:(ptb|canary|www)\.)?discord\.com/events/' - r'(?P<guild_id>[0-9]{15,20})/' - r'(?P<event_id>[0-9]{15,20})$' - ) - match = re.match(pattern, argument, flags=re.I) - if match: - # URL match - guild = ctx.bot.get_guild(int(match.group('guild_id'))) - - if guild: - event_id = int(match.group('event_id')) - result = guild.get_scheduled_event(event_id) - else: - # lookup by name - if guild: - result = discord.utils.get(guild.scheduled_events, name=argument) - else: - for guild in ctx.bot.guilds: - result = discord.utils.get(guild.scheduled_events, name=argument) - if result: - break - if result is None: - raise ScheduledEventNotFound(argument) - - return result - - -class clean_content(Converter[str]): - """Converts the argument to mention scrubbed version of - said content. - - This behaves similarly to :attr:`~discord.Message.clean_content`. - - Attributes - ------------ - fix_channel_mentions: :class:`bool` - Whether to clean channel mentions. - use_nicknames: :class:`bool` - Whether to use nicknames when transforming mentions. - escape_markdown: :class:`bool` - Whether to also escape special markdown characters.
- remove_markdown: :class:`bool` - Whether to also remove special markdown characters. This option is not supported with ``escape_markdown`` - - .. versionadded:: 1.7 - """ - - def __init__( - self, - *, - fix_channel_mentions: bool = False, - use_nicknames: bool = True, - escape_markdown: bool = False, - remove_markdown: bool = False, - ) -> None: - self.fix_channel_mentions = fix_channel_mentions - self.use_nicknames = use_nicknames - self.escape_markdown = escape_markdown - self.remove_markdown = remove_markdown - - async def convert(self, ctx: Context[BotT], argument: str) -> str: - msg = ctx.message - - if ctx.guild: - - def resolve_member(id: int) -> str: - m = _utils_get(msg.mentions, id=id) or ctx.guild.get_member(id) # type: ignore - return f'@{m.display_name if self.use_nicknames else m.name}' if m else '@deleted-user' - - def resolve_role(id: int) -> str: - r = _utils_get(msg.role_mentions, id=id) or ctx.guild.get_role(id) # type: ignore - return f'@{r.name}' if r else '@deleted-role' - - else: - - def resolve_member(id: int) -> str: - m = _utils_get(msg.mentions, id=id) or ctx.bot.get_user(id) - return f'@{m.display_name}' if m else '@deleted-user' - - def resolve_role(id: int) -> str: - return '@deleted-role' - - if self.fix_channel_mentions and ctx.guild: - - def resolve_channel(id: int) -> str: - c = ctx.guild._resolve_channel(id) # type: ignore - return f'#{c.name}' if c else '#deleted-channel' - - else: - - def resolve_channel(id: int) -> str: - return f'<#{id}>' - - transforms = { - '@': resolve_member, - '@!': resolve_member, - '#': resolve_channel, - '@&': resolve_role, - } - - def repl(match: re.Match) -> str: - type = match[1] - id = int(match[2]) - transformed = transforms[type](id) - return transformed - - result = re.sub(r'<(@[!&]?|#)([0-9]{15,20})>', repl, argument) - if self.escape_markdown: - result = discord.utils.escape_markdown(result) - elif self.remove_markdown: - result = discord.utils.remove_markdown(result) - - # Completely ensure no mentions escape: - return discord.utils.escape_mentions(result) - - -class Greedy(List[T]): - r"""A special converter that greedily consumes arguments until it can't. - As a consequence of this behaviour, most input errors are silently discarded, - since it is used as an indicator of when to stop parsing. - - When a parser error is met the greedy converter stops converting, undoes the - internal string parsing routine, and continues parsing regularly. - - For example, in the following code: - - .. code-block:: python3 - - @commands.command() - async def test(ctx, numbers: Greedy[int], reason: str): - await ctx.send("numbers: {}, reason: {}".format(numbers, reason)) - - An invocation of ``[p]test 1 2 3 4 5 6 hello`` would pass ``numbers`` with - ``[1, 2, 3, 4, 5, 6]`` and ``reason`` with ``hello``\. - - For more information, check :ref:`ext_commands_special_converters`. - - .. note:: - - For interaction based contexts the conversion error is propagated - rather than swallowed due to the difference in user experience with - application commands. - """ - - __slots__ = ('converter',) - - def __init__(self, *, converter: T) -> None: - self.converter: T = converter - - def __repr__(self) -> str: - converter = getattr(self.converter, '__name__', repr(self.converter)) - return f'Greedy[{converter}]' - - def __class_getitem__(cls, params: Union[Tuple[T], T]) -> Greedy[T]: - if not isinstance(params, tuple): - params = (params,) - if len(params) != 1: - raise TypeError('Greedy[...] 
only takes a single argument') - converter = params[0] - - args = getattr(converter, '__args__', ()) - if discord.utils.PY_310 and converter.__class__ is types.UnionType: # type: ignore - converter = Union[args] # type: ignore - - origin = getattr(converter, '__origin__', None) - - if not (callable(converter) or isinstance(converter, Converter) or origin is not None): - raise TypeError('Greedy[...] expects a type or a Converter instance.') - - if converter in (str, type(None)) or origin is Greedy: - raise TypeError(f'Greedy[{converter.__name__}] is invalid.') # type: ignore - - if origin is Union and type(None) in args: - raise TypeError(f'Greedy[{converter!r}] is invalid.') - - return cls(converter=converter) - - @property - def constructed_converter(self) -> Any: - # Only construct a converter once in order to maintain state between convert calls - if ( - inspect.isclass(self.converter) - and issubclass(self.converter, Converter) - and not inspect.ismethod(self.converter.convert) - ): - return self.converter() - return self.converter - - -if TYPE_CHECKING: - from typing_extensions import Annotated as Range -else: - - class Range: - """A special converter that can be applied to a parameter to require a numeric - or string type to fit within the range provided. - - During type checking time this is equivalent to :obj:`typing.Annotated` so type checkers understand - the intent of the code. - - Some example ranges: - - - ``Range[int, 10]`` means the minimum is 10 with no maximum. - - ``Range[int, None, 10]`` means the maximum is 10 with no minimum. - - ``Range[int, 1, 10]`` means the minimum is 1 and the maximum is 10. - - ``Range[float, 1.0, 5.0]`` means the minimum is 1.0 and the maximum is 5.0. - - ``Range[str, 1, 10]`` means the minimum length is 1 and the maximum length is 10. - - Inside a :class:`HybridCommand` this functions equivalently to :class:`discord.app_commands.Range`. - - If the value cannot be converted to the provided type or is outside the given range, - :class:`~.ext.commands.BadArgument` or :class:`~.ext.commands.RangeError` is raised to - the appropriate error handlers respectively. - - .. versionadded:: 2.0 - - Examples - ---------- - - .. code-block:: python3 - - @bot.command() - async def range(ctx: commands.Context, value: commands.Range[int, 10, 12]): - await ctx.send(f'Your value is {value}') - """ - - def __init__( - self, - *, - annotation: Any, - min: Optional[Union[int, float]] = None, - max: Optional[Union[int, float]] = None, - ) -> None: - self.annotation: Any = annotation - self.min: Optional[Union[int, float]] = min - self.max: Optional[Union[int, float]] = max - - if min and max and min > max: - raise TypeError('minimum cannot be larger than maximum') - - async def convert(self, ctx: Context[BotT], value: str) -> Union[int, float]: - try: - count = converted = self.annotation(value) - except ValueError: - raise BadArgument( - f'Converting to "{self.annotation.__name__}" failed for parameter "{ctx.current_parameter.name}".' 
- ) - - if self.annotation is str: - count = len(value) - - if (self.min is not None and count < self.min) or (self.max is not None and count > self.max): - raise RangeError(converted, minimum=self.min, maximum=self.max) - - return converted - - def __call__(self) -> None: - # Trick to allow it inside typing.Union - pass - - def __or__(self, rhs) -> Any: - return Union[self, rhs] - - def __repr__(self) -> str: - return f'{self.__class__.__name__}[{self.annotation.__name__}, {self.min}, {self.max}]' - - def __class_getitem__(cls, obj) -> Range: - if not isinstance(obj, tuple): - raise TypeError(f'expected tuple for arguments, received {obj.__class__.__name__} instead') - - if len(obj) == 2: - obj = (*obj, None) - elif len(obj) != 3: - raise TypeError('Range accepts either two or three arguments with the first being the type of range.') - - annotation, min, max = obj - - if min is None and max is None: - raise TypeError('Range must not be empty') - - if min is not None and max is not None: - # At this point max and min are both not none - if type(min) != type(max): - raise TypeError('Both min and max in Range must be the same type') - - if annotation not in (int, float, str): - raise TypeError(f'expected int, float, or str as range type, received {annotation!r} instead') - - if annotation in (str, int): - cast = int - else: - cast = float - - return cls( - annotation=annotation, - min=cast(min) if min is not None else None, - max=cast(max) if max is not None else None, - ) - - -def _convert_to_bool(argument: str) -> bool: - lowered = argument.lower() - if lowered in ('yes', 'y', 'true', 't', '1', 'enable', 'on'): - return True - elif lowered in ('no', 'n', 'false', 'f', '0', 'disable', 'off'): - return False - else: - raise BadBoolArgument(lowered) - - -_GenericAlias = type(List[T]) - - -def is_generic_type(tp: Any, *, _GenericAlias: type = _GenericAlias) -> bool: - return isinstance(tp, type) and issubclass(tp, Generic) or isinstance(tp, _GenericAlias) - - -CONVERTER_MAPPING: Dict[type, Any] = { - discord.Object: ObjectConverter, - discord.Member: MemberConverter, - discord.User: UserConverter, - discord.Message: MessageConverter, - discord.PartialMessage: PartialMessageConverter, - discord.TextChannel: TextChannelConverter, - discord.Invite: InviteConverter, - discord.Guild: GuildConverter, - discord.Role: RoleConverter, - discord.Game: GameConverter, - discord.Colour: ColourConverter, - discord.VoiceChannel: VoiceChannelConverter, - discord.StageChannel: StageChannelConverter, - discord.Emoji: EmojiConverter, - discord.PartialEmoji: PartialEmojiConverter, - discord.CategoryChannel: CategoryChannelConverter, - discord.Thread: ThreadConverter, - discord.abc.GuildChannel: GuildChannelConverter, - discord.GuildSticker: GuildStickerConverter, - discord.ScheduledEvent: ScheduledEventConverter, - discord.ForumChannel: ForumChannelConverter, -} - - -async def _actual_conversion(ctx: Context[BotT], converter: Any, argument: str, param: inspect.Parameter): - if converter is bool: - return _convert_to_bool(argument) - - try: - module = converter.__module__ - except AttributeError: - pass - else: - if module is not None and (module.startswith('discord.') and not module.endswith('converter')): - converter = CONVERTER_MAPPING.get(converter, converter) - - try: - if inspect.isclass(converter) and issubclass(converter, Converter): - if inspect.ismethod(converter.convert): - return await converter.convert(ctx, argument) - else: - return await converter().convert(ctx, argument) - elif isinstance(converter, 
Converter): - return await converter.convert(ctx, argument) # type: ignore - except CommandError: - raise - except Exception as exc: - raise ConversionError(converter, exc) from exc # type: ignore - - try: - return converter(argument) - except CommandError: - raise - except Exception as exc: - try: - name = converter.__name__ - except AttributeError: - name = converter.__class__.__name__ - - raise BadArgument(f'Converting to "{name}" failed for parameter "{param.name}".') from exc - - -@overload -async def run_converters( - ctx: Context[BotT], converter: Union[Type[Converter[T]], Converter[T]], argument: str, param: Parameter -) -> T: - ... - - -@overload -async def run_converters(ctx: Context[BotT], converter: Any, argument: str, param: Parameter) -> Any: - ... - - -async def run_converters(ctx: Context[BotT], converter: Any, argument: str, param: Parameter) -> Any: - """|coro| - - Runs converters for a given converter, argument, and parameter. - - This function does the same work that the library does under the hood. - - .. versionadded:: 2.0 - - Parameters - ------------ - ctx: :class:`Context` - The invocation context to run the converters under. - converter: Any - The converter to run, this corresponds to the annotation in the function. - argument: :class:`str` - The argument to convert to. - param: :class:`Parameter` - The parameter being converted. This is mainly for error reporting. - - Raises - ------- - CommandError - The converter failed to convert. - - Returns - -------- - Any - The resulting conversion. - """ - origin = getattr(converter, '__origin__', None) - - if origin is Union: - errors = [] - _NoneType = type(None) - union_args = converter.__args__ - for conv in union_args: - # if we got to this part in the code, then the previous conversions have failed - # so we should just undo the view, return the default, and allow parsing to continue - # with the other parameters - if conv is _NoneType and param.kind != param.VAR_POSITIONAL: - ctx.view.undo() - return None if param.required else await param.get_default(ctx) - - try: - value = await run_converters(ctx, conv, argument, param) - except CommandError as exc: - errors.append(exc) - else: - return value - - # if we're here, then we failed all the converters - raise BadUnionArgument(param, union_args, errors) - - if origin is Literal: - errors = [] - conversions = {} - literal_args = converter.__args__ - for literal in literal_args: - literal_type = type(literal) - try: - value = conversions[literal_type] - except KeyError: - try: - value = await _actual_conversion(ctx, literal_type, argument, param) - except CommandError as exc: - errors.append(exc) - conversions[literal_type] = object() - continue - else: - conversions[literal_type] = value - - if value == literal: - return value - - # if we're here, then we failed to match all the literals - raise BadLiteralArgument(param, literal_args, errors, argument) - - # This must be the last if-clause in the chain of origin checking - # Nearly every type is a generic type within the typing library - # So care must be taken to make sure a more specialised origin handle - # isn't overwritten by the widest if clause - if origin is not None and is_generic_type(converter): - converter = origin - - return await _actual_conversion(ctx, converter, argument, param) diff --git a/.venv/Lib/site-packages/discord/ext/commands/cooldowns.py b/.venv/Lib/site-packages/discord/ext/commands/cooldowns.py deleted file mode 100644 index 2af7cb0..0000000 --- 
a/.venv/Lib/site-packages/discord/ext/commands/cooldowns.py +++ /dev/null @@ -1,285 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" - -from __future__ import annotations - - -from typing import Any, Callable, Deque, Dict, Optional, Union, Generic, TypeVar, TYPE_CHECKING -from discord.enums import Enum -import time -import asyncio -from collections import deque - -from ...abc import PrivateChannel -from .errors import MaxConcurrencyReached -from .context import Context -from discord.app_commands import Cooldown as Cooldown - -if TYPE_CHECKING: - from typing_extensions import Self - - from ...message import Message - -__all__ = ( - 'BucketType', - 'Cooldown', - 'CooldownMapping', - 'DynamicCooldownMapping', - 'MaxConcurrency', -) - -T_contra = TypeVar('T_contra', contravariant=True) - - -class BucketType(Enum): - default = 0 - user = 1 - guild = 2 - channel = 3 - member = 4 - category = 5 - role = 6 - - def get_key(self, msg: Union[Message, Context[Any]]) -> Any: - if self is BucketType.user: - return msg.author.id - elif self is BucketType.guild: - return (msg.guild or msg.author).id - elif self is BucketType.channel: - return msg.channel.id - elif self is BucketType.member: - return ((msg.guild and msg.guild.id), msg.author.id) - elif self is BucketType.category: - return (msg.channel.category or msg.channel).id # type: ignore - elif self is BucketType.role: - # we return the channel id of a private-channel as there are only roles in guilds - # and that yields the same result as for a guild with only the @everyone role - # NOTE: PrivateChannel doesn't actually have an id attribute but we assume we are - # receiving a DMChannel or GroupChannel which inherit from PrivateChannel and do - return (msg.channel if isinstance(msg.channel, PrivateChannel) else msg.author.top_role).id # type: ignore - - def __call__(self, msg: Union[Message, Context[Any]]) -> Any: - return self.get_key(msg) - - -class CooldownMapping(Generic[T_contra]): - def __init__( - self, - original: Optional[Cooldown], - type: Callable[[T_contra], Any], - ) -> None: - if not callable(type): - raise TypeError('Cooldown type must be a BucketType or callable') - - self._cache: Dict[Any, Cooldown] = {} - self._cooldown: Optional[Cooldown] = original - self._type: Callable[[T_contra], Any] = type - - def copy(self) -> CooldownMapping[T_contra]: - ret = CooldownMapping(self._cooldown, self._type) - ret._cache = self._cache.copy() - return ret - - @property - def valid(self) -> 
bool: - return self._cooldown is not None - - @property - def type(self) -> Callable[[T_contra], Any]: - return self._type - - @classmethod - def from_cooldown(cls, rate: float, per: float, type: Callable[[T_contra], Any]) -> Self: - return cls(Cooldown(rate, per), type) - - def _bucket_key(self, msg: T_contra) -> Any: - return self._type(msg) - - def _verify_cache_integrity(self, current: Optional[float] = None) -> None: - # we want to delete all cache objects that haven't been used - # in a cooldown window. e.g. if we have a command that has a - # cooldown of 60s and it has not been used in 60s then that key should be deleted - current = current or time.time() - dead_keys = [k for k, v in self._cache.items() if current > v._last + v.per] - for k in dead_keys: - del self._cache[k] - - def create_bucket(self, message: T_contra) -> Cooldown: - return self._cooldown.copy() # type: ignore - - def get_bucket(self, message: T_contra, current: Optional[float] = None) -> Optional[Cooldown]: - if self._type is BucketType.default: - return self._cooldown - - self._verify_cache_integrity(current) - key = self._bucket_key(message) - if key not in self._cache: - bucket = self.create_bucket(message) - if bucket is not None: - self._cache[key] = bucket - else: - bucket = self._cache[key] - - return bucket - - def update_rate_limit(self, message: T_contra, current: Optional[float] = None, tokens: int = 1) -> Optional[float]: - bucket = self.get_bucket(message, current) - if bucket is None: - return None - return bucket.update_rate_limit(current, tokens=tokens) - - -class DynamicCooldownMapping(CooldownMapping[T_contra]): - def __init__( - self, - factory: Callable[[T_contra], Optional[Cooldown]], - type: Callable[[T_contra], Any], - ) -> None: - super().__init__(None, type) - self._factory: Callable[[T_contra], Optional[Cooldown]] = factory - - def copy(self) -> DynamicCooldownMapping[T_contra]: - ret = DynamicCooldownMapping(self._factory, self._type) - ret._cache = self._cache.copy() - return ret - - @property - def valid(self) -> bool: - return True - - def create_bucket(self, message: T_contra) -> Optional[Cooldown]: - return self._factory(message) - - -class _Semaphore: - """This class is a version of a semaphore. - - If you're wondering why asyncio.Semaphore isn't being used, - it's because it doesn't expose the internal value. This internal - value is necessary because I need to support both `wait=True` and - `wait=False`. - - An asyncio.Queue could have been used to do this as well -- but it is - not as inefficient since internally that uses two queues and is a bit - overkill for what is basically a counter. 
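A small sketch of how the cooldown machinery above is normally driven, either through the public decorator or by holding a ``CooldownMapping`` directly; the ``daily`` command and the ``spam_control`` mapping are illustrative assumptions, not library code: ::

    from discord.ext import commands
    from discord.ext.commands import BucketType

    # The decorator attaches a CooldownMapping to the command:
    # at most one use every 30 seconds, bucketed per user.
    @commands.command()
    @commands.cooldown(rate=1, per=30.0, type=BucketType.user)
    async def daily(ctx: commands.Context):
        await ctx.send('Claimed!')

    # A standalone mapping can rate limit arbitrary messages as well.
    spam_control = commands.CooldownMapping.from_cooldown(3, 10.0, BucketType.channel)

    def is_rate_limited(message) -> bool:
        bucket = spam_control.get_bucket(message)
        retry_after = bucket.update_rate_limit() if bucket else None
        return retry_after is not None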
- """ - - __slots__ = ('value', 'loop', '_waiters') - - def __init__(self, number: int) -> None: - self.value: int = number - self.loop: asyncio.AbstractEventLoop = asyncio.get_running_loop() - self._waiters: Deque[asyncio.Future] = deque() - - def __repr__(self) -> str: - return f'<_Semaphore value={self.value} waiters={len(self._waiters)}>' - - def locked(self) -> bool: - return self.value == 0 - - def is_active(self) -> bool: - return len(self._waiters) > 0 - - def wake_up(self) -> None: - while self._waiters: - future = self._waiters.popleft() - if not future.done(): - future.set_result(None) - return - - async def acquire(self, *, wait: bool = False) -> bool: - if not wait and self.value <= 0: - # signal that we're not acquiring - return False - - while self.value <= 0: - future = self.loop.create_future() - self._waiters.append(future) - try: - await future - except: - future.cancel() - if self.value > 0 and not future.cancelled(): - self.wake_up() - raise - - self.value -= 1 - return True - - def release(self) -> None: - self.value += 1 - self.wake_up() - - -class MaxConcurrency: - __slots__ = ('number', 'per', 'wait', '_mapping') - - def __init__(self, number: int, *, per: BucketType, wait: bool) -> None: - self._mapping: Dict[Any, _Semaphore] = {} - self.per: BucketType = per - self.number: int = number - self.wait: bool = wait - - if number <= 0: - raise ValueError('max_concurrency \'number\' cannot be less than 1') - - if not isinstance(per, BucketType): - raise TypeError(f'max_concurrency \'per\' must be of type BucketType not {type(per)!r}') - - def copy(self) -> Self: - return self.__class__(self.number, per=self.per, wait=self.wait) - - def __repr__(self) -> str: - return f'' - - def get_key(self, message: Union[Message, Context[Any]]) -> Any: - return self.per.get_key(message) - - async def acquire(self, message: Union[Message, Context[Any]]) -> None: - key = self.get_key(message) - - try: - sem = self._mapping[key] - except KeyError: - self._mapping[key] = sem = _Semaphore(self.number) - - acquired = await sem.acquire(wait=self.wait) - if not acquired: - raise MaxConcurrencyReached(self.number, self.per) - - async def release(self, message: Union[Message, Context[Any]]) -> None: - # Technically there's no reason for this function to be async - # But it might be more useful in the future - key = self.get_key(message) - - try: - sem = self._mapping[key] - except KeyError: - # ...? peculiar - return - else: - sem.release() - - if sem.value >= self.number and not sem.is_active(): - del self._mapping[key] diff --git a/.venv/Lib/site-packages/discord/ext/commands/core.py b/.venv/Lib/site-packages/discord/ext/commands/core.py deleted file mode 100644 index ffbefe2..0000000 --- a/.venv/Lib/site-packages/discord/ext/commands/core.py +++ /dev/null @@ -1,2640 +0,0 @@ -""" -The MIT License (MIT) - -Copyright (c) 2015-present Rapptz - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. -""" -from __future__ import annotations - -import asyncio -import datetime -import functools -import inspect -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - Generic, - List, - Literal, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, - overload, -) -import re - -import discord - -from ._types import _BaseCommand, CogT -from .cog import Cog -from .context import Context -from .converter import Greedy, run_converters -from .cooldowns import BucketType, Cooldown, CooldownMapping, DynamicCooldownMapping, MaxConcurrency -from .errors import * -from .parameters import Parameter, Signature -from discord.app_commands.commands import NUMPY_DOCSTRING_ARG_REGEX - -if TYPE_CHECKING: - from typing_extensions import Concatenate, ParamSpec, Self - - from ._types import BotT, Check, ContextT, Coro, CoroFunc, Error, Hook, UserCheck - - -__all__ = ( - 'Command', - 'Group', - 'GroupMixin', - 'command', - 'group', - 'has_role', - 'has_permissions', - 'has_any_role', - 'check', - 'check_any', - 'before_invoke', - 'after_invoke', - 'bot_has_role', - 'bot_has_permissions', - 'bot_has_any_role', - 'cooldown', - 'dynamic_cooldown', - 'max_concurrency', - 'dm_only', - 'guild_only', - 'is_owner', - 'is_nsfw', - 'has_guild_permissions', - 'bot_has_guild_permissions', -) - -MISSING: Any = discord.utils.MISSING - -T = TypeVar('T') -CommandT = TypeVar('CommandT', bound='Command[Any, ..., Any]') -# CHT = TypeVar('CHT', bound='Check') -GroupT = TypeVar('GroupT', bound='Group[Any, ..., Any]') - -if TYPE_CHECKING: - P = ParamSpec('P') -else: - P = TypeVar('P') - - -def unwrap_function(function: Callable[..., Any], /) -> Callable[..., Any]: - partial = functools.partial - while True: - if hasattr(function, '__wrapped__'): - function = function.__wrapped__ - elif isinstance(function, partial): - function = function.func - else: - return function - - -def get_signature_parameters( - function: Callable[..., Any], - globalns: Dict[str, Any], - /, - *, - skip_parameters: Optional[int] = None, -) -> Dict[str, Parameter]: - signature = Signature.from_callable(function) - params: Dict[str, Parameter] = {} - cache: Dict[str, Any] = {} - eval_annotation = discord.utils.evaluate_annotation - required_params = discord.utils.is_inside_class(function) + 1 if skip_parameters is None else skip_parameters - if len(signature.parameters) < required_params: - raise TypeError(f'Command signature requires at least {required_params - 1} parameter(s)') - - iterator = iter(signature.parameters.items()) - for _ in range(0, required_params): - next(iterator) - - for name, parameter in iterator: - default = parameter.default - if isinstance(default, Parameter): # update from the default - if default.annotation is not Parameter.empty: - # There are a few cases to care about here. - # x: TextChannel = commands.CurrentChannel - # x = commands.CurrentChannel - # In both of these cases, the default parameter has an explicit annotation - # but in the second case it's only used as the fallback. 
- if default._fallback: - if parameter.annotation is Parameter.empty: - parameter._annotation = default.annotation - else: - parameter._annotation = default.annotation - - parameter._default = default.default - parameter._description = default._description - parameter._displayed_default = default._displayed_default - parameter._displayed_name = default._displayed_name - - annotation = parameter.annotation - - if annotation is None: - params[name] = parameter.replace(annotation=type(None)) - continue - - annotation = eval_annotation(annotation, globalns, globalns, cache) - if annotation is Greedy: - raise TypeError('Unparameterized Greedy[...] is disallowed in signature.') - - params[name] = parameter.replace(annotation=annotation) - - return params - - -PARAMETER_HEADING_REGEX = re.compile(r'Parameters?\n---+\n', re.I) - - -def _fold_text(input: str) -> str: - """Turns a single newline into a space, and multiple newlines into a newline.""" - - def replacer(m: re.Match[str]) -> str: - if len(m.group()) <= 1: - return ' ' - return '\n' - - return re.sub(r'\n+', replacer, inspect.cleandoc(input)) - - -def extract_descriptions_from_docstring(function: Callable[..., Any], params: Dict[str, Parameter], /) -> Optional[str]: - docstring = inspect.getdoc(function) - - if docstring is None: - return None - - divide = PARAMETER_HEADING_REGEX.split(docstring, 1) - if len(divide) == 1: - return docstring - - description, param_docstring = divide - for match in NUMPY_DOCSTRING_ARG_REGEX.finditer(param_docstring): - name = match.group('name') - - if name not in params: - is_display_name = discord.utils.get(params.values(), displayed_name=name) - if is_display_name: - name = is_display_name.name - else: - continue - - param = params[name] - if param.description is None: - param._description = _fold_text(match.group('description')) - - return _fold_text(description.strip()) - - -def wrap_callback(coro: Callable[P, Coro[T]], /) -> Callable[P, Coro[Optional[T]]]: - @functools.wraps(coro) - async def wrapped(*args: P.args, **kwargs: P.kwargs) -> Optional[T]: - try: - ret = await coro(*args, **kwargs) - except CommandError: - raise - except asyncio.CancelledError: - return - except Exception as exc: - raise CommandInvokeError(exc) from exc - return ret - - return wrapped - - -def hooked_wrapped_callback( - command: Command[Any, ..., Any], ctx: Context[BotT], coro: Callable[P, Coro[T]], / -) -> Callable[P, Coro[Optional[T]]]: - @functools.wraps(coro) - async def wrapped(*args: P.args, **kwargs: P.kwargs) -> Optional[T]: - try: - ret = await coro(*args, **kwargs) - except CommandError: - ctx.command_failed = True - raise - except asyncio.CancelledError: - ctx.command_failed = True - return - except Exception as exc: - ctx.command_failed = True - raise CommandInvokeError(exc) from exc - finally: - if command._max_concurrency is not None: - await command._max_concurrency.release(ctx.message) - - await command.call_after_hooks(ctx) - return ret - - return wrapped - - -class _CaseInsensitiveDict(dict): - def __contains__(self, k): - return super().__contains__(k.casefold()) - - def __delitem__(self, k): - return super().__delitem__(k.casefold()) - - def __getitem__(self, k): - return super().__getitem__(k.casefold()) - - def get(self, k, default=None): - return super().get(k.casefold(), default) - - def pop(self, k, default=None): - return super().pop(k.casefold(), default) - - def __setitem__(self, k, v): - super().__setitem__(k.casefold(), v) - - -class _AttachmentIterator: - def __init__(self, data: 
List[discord.Attachment]): - self.data: List[discord.Attachment] = data - self.index: int = 0 - - def __iter__(self) -> Self: - return self - - def __next__(self) -> discord.Attachment: - try: - value = self.data[self.index] - except IndexError: - raise StopIteration - else: - self.index += 1 - return value - - def is_empty(self) -> bool: - return self.index >= len(self.data) - - -class Command(_BaseCommand, Generic[CogT, P, T]): - r"""A class that implements the protocol for a bot text command. - - These are not created manually, instead they are created via the - decorator or functional interface. - - Attributes - ----------- - name: :class:`str` - The name of the command. - callback: :ref:`coroutine ` - The coroutine that is executed when the command is called. - help: Optional[:class:`str`] - The long help text for the command. - brief: Optional[:class:`str`] - The short help text for the command. - usage: Optional[:class:`str`] - A replacement for arguments in the default help text. - aliases: Union[List[:class:`str`], Tuple[:class:`str`]] - The list of aliases the command can be invoked under. - enabled: :class:`bool` - A boolean that indicates if the command is currently enabled. - If the command is invoked while it is disabled, then - :exc:`.DisabledCommand` is raised to the :func:`.on_command_error` - event. Defaults to ``True``. - parent: Optional[:class:`Group`] - The parent group that this command belongs to. ``None`` if there - isn't one. - cog: Optional[:class:`Cog`] - The cog that this command belongs to. ``None`` if there isn't one. - checks: List[Callable[[:class:`.Context`], :class:`bool`]] - A list of predicates that verifies if the command could be executed - with the given :class:`.Context` as the sole parameter. If an exception - is necessary to be thrown to signal failure, then one inherited from - :exc:`.CommandError` should be used. Note that if the checks fail then - :exc:`.CheckFailure` exception is raised to the :func:`.on_command_error` - event. - description: :class:`str` - The message prefixed into the default help command. - hidden: :class:`bool` - If ``True``\, the default help command does not show this in the - help output. - rest_is_raw: :class:`bool` - If ``False`` and a keyword-only argument is provided then the keyword - only argument is stripped and handled as if it was a regular argument - that handles :exc:`.MissingRequiredArgument` and default values in a - regular matter rather than passing the rest completely raw. If ``True`` - then the keyword-only argument will pass in the rest of the arguments - in a completely raw matter. Defaults to ``False``. - invoked_subcommand: Optional[:class:`Command`] - The subcommand that was invoked, if any. - require_var_positional: :class:`bool` - If ``True`` and a variadic positional argument is specified, requires - the user to specify at least one argument. Defaults to ``False``. - - .. versionadded:: 1.5 - - ignore_extra: :class:`bool` - If ``True``\, ignores extraneous strings passed to a command if all its - requirements are met (e.g. ``?foo a b c`` when only expecting ``a`` - and ``b``). Otherwise :func:`.on_command_error` and local error handlers - are called with :exc:`.TooManyArguments`. Defaults to ``True``. - cooldown_after_parsing: :class:`bool` - If ``True``\, cooldown processing is done after argument parsing, - which calls converters. If ``False`` then cooldown processing is done - first and then the converters are called second. Defaults to ``False``. 
- extras: :class:`dict` - A dict of user provided extras to attach to the Command. - - .. note:: - This object may be copied by the library. - - - .. versionadded:: 2.0 - """ - __original_kwargs__: Dict[str, Any] - - def __new__(cls, *args: Any, **kwargs: Any) -> Self: - # if you're wondering why this is done, it's because we need to ensure - # we have a complete original copy of **kwargs even for classes that - # mess with it by popping before delegating to the subclass __init__. - # In order to do this, we need to control the instance creation and - # inject the original kwargs through __new__ rather than doing it - # inside __init__. - self = super().__new__(cls) - - # we do a shallow copy because it's probably the most common use case. - # this could potentially break if someone modifies a list or something - # while it's in movement, but for now this is the cheapest and - # fastest way to do what we want. - self.__original_kwargs__ = kwargs.copy() - return self - - def __init__( - self, - func: Union[ - Callable[Concatenate[CogT, Context[Any], P], Coro[T]], - Callable[Concatenate[Context[Any], P], Coro[T]], - ], - /, - **kwargs: Any, - ) -> None: - if not asyncio.iscoroutinefunction(func): - raise TypeError('Callback must be a coroutine.') - - name = kwargs.get('name') or func.__name__ - if not isinstance(name, str): - raise TypeError('Name of a command must be a string.') - self.name: str = name - - self.callback = func - self.enabled: bool = kwargs.get('enabled', True) - - help_doc = kwargs.get('help') - if help_doc is not None: - help_doc = inspect.cleandoc(help_doc) - else: - help_doc = extract_descriptions_from_docstring(func, self.params) - - self.help: Optional[str] = help_doc - - self.brief: Optional[str] = kwargs.get('brief') - self.usage: Optional[str] = kwargs.get('usage') - self.rest_is_raw: bool = kwargs.get('rest_is_raw', False) - self.aliases: Union[List[str], Tuple[str]] = kwargs.get('aliases', []) - self.extras: Dict[Any, Any] = kwargs.get('extras', {}) - - if not isinstance(self.aliases, (list, tuple)): - raise TypeError("Aliases of a command must be a list or a tuple of strings.") - - self.description: str = inspect.cleandoc(kwargs.get('description', '')) - self.hidden: bool = kwargs.get('hidden', False) - - try: - checks = func.__commands_checks__ - checks.reverse() - except AttributeError: - checks = kwargs.get('checks', []) - - self.checks: List[UserCheck[Context[Any]]] = checks - - try: - cooldown = func.__commands_cooldown__ - except AttributeError: - cooldown = kwargs.get('cooldown') - - if cooldown is None: - buckets = CooldownMapping(cooldown, BucketType.default) - elif isinstance(cooldown, CooldownMapping): - buckets: CooldownMapping[Context[Any]] = cooldown - else: - raise TypeError("Cooldown must be an instance of CooldownMapping or None.") - self._buckets: CooldownMapping[Context[Any]] = buckets - - try: - max_concurrency = func.__commands_max_concurrency__ - except AttributeError: - max_concurrency = kwargs.get('max_concurrency') - - self._max_concurrency: Optional[MaxConcurrency] = max_concurrency - - self.require_var_positional: bool = kwargs.get('require_var_positional', False) - self.ignore_extra: bool = kwargs.get('ignore_extra', True) - self.cooldown_after_parsing: bool = kwargs.get('cooldown_after_parsing', False) - self._cog: CogT = None # type: ignore # This breaks every other pyright release - - # bandaid for the fact that sometimes parent can be the bot instance - parent: Optional[GroupMixin[Any]] = kwargs.get('parent') - self.parent: 
Optional[GroupMixin[Any]] = parent if isinstance(parent, _BaseCommand) else None # type: ignore # Does not recognise mixin usage - - self._before_invoke: Optional[Hook] = None - try: - before_invoke = func.__before_invoke__ - except AttributeError: - pass - else: - self.before_invoke(before_invoke) - - self._after_invoke: Optional[Hook] = None - try: - after_invoke = func.__after_invoke__ - except AttributeError: - pass - else: - self.after_invoke(after_invoke) - - @property - def cog(self) -> CogT: - return self._cog - - @cog.setter - def cog(self, value: CogT) -> None: - self._cog = value - - @property - def callback( - self, - ) -> Union[Callable[Concatenate[CogT, Context[Any], P], Coro[T]], Callable[Concatenate[Context[Any], P], Coro[T]],]: - return self._callback - - @callback.setter - def callback( - self, - function: Union[ - Callable[Concatenate[CogT, Context[Any], P], Coro[T]], - Callable[Concatenate[Context[Any], P], Coro[T]], - ], - ) -> None: - self._callback = function - unwrap = unwrap_function(function) - self.module: str = unwrap.__module__ - - try: - globalns = unwrap.__globals__ - except AttributeError: - globalns = {} - - self.params: Dict[str, Parameter] = get_signature_parameters(function, globalns) - - def add_check(self, func: UserCheck[Context[Any]], /) -> None: - """Adds a check to the command. - - This is the non-decorator interface to :func:`.check`. - - .. versionadded:: 1.3 - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - .. seealso:: The :func:`~discord.ext.commands.check` decorator - - Parameters - ----------- - func - The function that will be used as a check. - """ - - self.checks.append(func) - - def remove_check(self, func: UserCheck[Context[Any]], /) -> None: - """Removes a check from the command. - - This function is idempotent and will not raise an exception - if the function is not in the command's checks. - - .. versionadded:: 1.3 - - .. versionchanged:: 2.0 - - ``func`` parameter is now positional-only. - - Parameters - ----------- - func - The function to remove from the checks. - """ - - try: - self.checks.remove(func) - except ValueError: - pass - - def update(self, **kwargs: Any) -> None: - """Updates :class:`Command` instance with updated attribute. - - This works similarly to the :func:`~discord.ext.commands.command` decorator in terms - of parameters in that they are passed to the :class:`Command` or - subclass constructors, sans the name and callback. - """ - cog = self.cog - self.__init__(self.callback, **dict(self.__original_kwargs__, **kwargs)) - self.cog = cog - - async def __call__(self, context: Context[BotT], /, *args: P.args, **kwargs: P.kwargs) -> T: - """|coro| - - Calls the internal callback that the command holds. - - .. note:: - - This bypasses all mechanisms -- including checks, converters, - invoke hooks, cooldowns, etc. You must take care to pass - the proper arguments and types to this function. - - .. versionadded:: 1.3 - - .. versionchanged:: 2.0 - - ``context`` parameter is now positional-only. 
- """ - if self.cog is not None: - return await self.callback(self.cog, context, *args, **kwargs) # type: ignore - else: - return await self.callback(context, *args, **kwargs) # type: ignore - - def _ensure_assignment_on_copy(self, other: Self) -> Self: - other._before_invoke = self._before_invoke - other._after_invoke = self._after_invoke - other.extras = self.extras - if self.checks != other.checks: - other.checks = self.checks.copy() - if self._buckets.valid and not other._buckets.valid: - other._buckets = self._buckets.copy() - if self._max_concurrency and self._max_concurrency != other._max_concurrency: - other._max_concurrency = self._max_concurrency.copy() - - try: - other.on_error = self.on_error - except AttributeError: - pass - return other - - def copy(self) -> Self: - """Creates a copy of this command. - - Returns - -------- - :class:`Command` - A new instance of this command. - """ - ret = self.__class__(self.callback, **self.__original_kwargs__) - return self._ensure_assignment_on_copy(ret) - - def _update_copy(self, kwargs: Dict[str, Any]) -> Self: - if kwargs: - kw = kwargs.copy() - kw.update(self.__original_kwargs__) - copy = self.__class__(self.callback, **kw) - return self._ensure_assignment_on_copy(copy) - else: - return self.copy() - - async def dispatch_error(self, ctx: Context[BotT], error: CommandError, /) -> None: - ctx.command_failed = True - cog = self.cog - try: - coro = self.on_error - except AttributeError: - pass - else: - injected = wrap_callback(coro) # type: ignore - if cog is not None: - await injected(cog, ctx, error) - else: - await injected(ctx, error) # type: ignore - - try: - if cog is not None: - local = Cog._get_overridden_method(cog.cog_command_error) - if local is not None: - wrapped = wrap_callback(local) - await wrapped(ctx, error) - finally: - ctx.bot.dispatch('command_error', ctx, error) - - async def transform(self, ctx: Context[BotT], param: Parameter, attachments: _AttachmentIterator, /) -> Any: - converter = param.converter - consume_rest_is_special = param.kind == param.KEYWORD_ONLY and not self.rest_is_raw - view = ctx.view - view.skip_ws() - - # The greedy converter is simple -- it keeps going until it fails in which case, - # it undos the view ready for the next parameter to use instead - if isinstance(converter, Greedy): - # Special case for Greedy[discord.Attachment] to consume the attachments iterator - if converter.converter is discord.Attachment: - return list(attachments) - - if param.kind in (param.POSITIONAL_OR_KEYWORD, param.POSITIONAL_ONLY): - return await self._transform_greedy_pos(ctx, param, param.required, converter.constructed_converter) - elif param.kind == param.VAR_POSITIONAL: - return await self._transform_greedy_var_pos(ctx, param, converter.constructed_converter) - else: - # if we're here, then it's a KEYWORD_ONLY param type - # since this is mostly useless, we'll helpfully transform Greedy[X] - # into just X and do the parsing that way. 
- converter = converter.constructed_converter - - # Try to detect Optional[discord.Attachment] or discord.Attachment special converter - if converter is discord.Attachment: - try: - return next(attachments) - except StopIteration: - raise MissingRequiredAttachment(param) - - if self._is_typing_optional(param.annotation) and param.annotation.__args__[0] is discord.Attachment: - if attachments.is_empty(): - # I have no idea who would be doing Optional[discord.Attachment] = 1 - # but for those cases then 1 should be returned instead of None - return None if param.default is param.empty else param.default - return next(attachments) - - if view.eof: - if param.kind == param.VAR_POSITIONAL: - raise RuntimeError() # break the loop - if param.required: - if self._is_typing_optional(param.annotation): - return None - if hasattr(converter, '__commands_is_flag__') and converter._can_be_constructible(): - return await converter._construct_default(ctx) - raise MissingRequiredArgument(param) - return await param.get_default(ctx) - - previous = view.index - if consume_rest_is_special: - ctx.current_argument = argument = view.read_rest().strip() - else: - try: - ctx.current_argument = argument = view.get_quoted_word() - except ArgumentParsingError as exc: - if self._is_typing_optional(param.annotation): - view.index = previous - return None if param.required else await param.get_default(ctx) - else: - raise exc - view.previous = previous - - # type-checker fails to narrow argument - return await run_converters(ctx, converter, argument, param) # type: ignore - - async def _transform_greedy_pos(self, ctx: Context[BotT], param: Parameter, required: bool, converter: Any) -> Any: - view = ctx.view - result = [] - while not view.eof: - # for use with a manual undo - previous = view.index - - view.skip_ws() - try: - ctx.current_argument = argument = view.get_quoted_word() - value = await run_converters(ctx, converter, argument, param) # type: ignore - except (CommandError, ArgumentParsingError): - view.index = previous - break - else: - result.append(value) - - if not result and not required: - return await param.get_default(ctx) - return result - - async def _transform_greedy_var_pos(self, ctx: Context[BotT], param: Parameter, converter: Any) -> Any: - view = ctx.view - previous = view.index - try: - ctx.current_argument = argument = view.get_quoted_word() - value = await run_converters(ctx, converter, argument, param) # type: ignore - except (CommandError, ArgumentParsingError): - view.index = previous - raise RuntimeError() from None # break loop - else: - return value - - @property - def clean_params(self) -> Dict[str, Parameter]: - """Dict[:class:`str`, :class:`Parameter`]: - Retrieves the parameter dictionary without the context or self parameters. - - Useful for inspecting signature. - """ - return self.params.copy() - - @property - def cooldown(self) -> Optional[Cooldown]: - """Optional[:class:`~discord.app_commands.Cooldown`]: The cooldown of a command when invoked - or ``None`` if the command doesn't have a registered cooldown. - - .. versionadded:: 2.0 - """ - return self._buckets._cooldown - - @property - def full_parent_name(self) -> str: - """:class:`str`: Retrieves the fully qualified parent command name. - - This the base command name required to execute it. For example, - in ``?one two three`` the parent name would be ``one two``. 
- """ - entries = [] - command = self - # command.parent is type-hinted as GroupMixin some attributes are resolved via MRO - while command.parent is not None: # type: ignore - command = command.parent # type: ignore - entries.append(command.name) # type: ignore - - return ' '.join(reversed(entries)) - - @property - def parents(self) -> List[Group[Any, ..., Any]]: - """List[:class:`Group`]: Retrieves the parents of this command. - - If the command has no parents then it returns an empty :class:`list`. - - For example in commands ``?a b c test``, the parents are ``[c, b, a]``. - - .. versionadded:: 1.1 - """ - entries = [] - command = self - while command.parent is not None: # type: ignore - command = command.parent # type: ignore - entries.append(command) - - return entries - - @property - def root_parent(self) -> Optional[Group[Any, ..., Any]]: - """Optional[:class:`Group`]: Retrieves the root parent of this command. - - If the command has no parents then it returns ``None``. - - For example in commands ``?a b c test``, the root parent is ``a``. - """ - if not self.parent: - return None - return self.parents[-1] - - @property - def qualified_name(self) -> str: - """:class:`str`: Retrieves the fully qualified command name. - - This is the full parent name with the command name as well. - For example, in ``?one two three`` the qualified name would be - ``one two three``. - """ - - parent = self.full_parent_name - if parent: - return parent + ' ' + self.name - else: - return self.name - - def __str__(self) -> str: - return self.qualified_name - - async def _parse_arguments(self, ctx: Context[BotT]) -> None: - ctx.args = [ctx] if self.cog is None else [self.cog, ctx] - ctx.kwargs = {} - args = ctx.args - kwargs = ctx.kwargs - attachments = _AttachmentIterator(ctx.message.attachments) - - view = ctx.view - iterator = iter(self.params.items()) - - for name, param in iterator: - ctx.current_parameter = param - if param.kind in (param.POSITIONAL_OR_KEYWORD, param.POSITIONAL_ONLY): - transformed = await self.transform(ctx, param, attachments) - args.append(transformed) - elif param.kind == param.KEYWORD_ONLY: - # kwarg only param denotes "consume rest" semantics - if self.rest_is_raw: - ctx.current_argument = argument = view.read_rest() - kwargs[name] = await run_converters(ctx, param.converter, argument, param) - else: - kwargs[name] = await self.transform(ctx, param, attachments) - break - elif param.kind == param.VAR_POSITIONAL: - if view.eof and self.require_var_positional: - raise MissingRequiredArgument(param) - while not view.eof: - try: - transformed = await self.transform(ctx, param, attachments) - args.append(transformed) - except RuntimeError: - break - - if not self.ignore_extra and not view.eof: - raise TooManyArguments('Too many arguments passed to ' + self.qualified_name) - - async def call_before_hooks(self, ctx: Context[BotT], /) -> None: - # now that we're done preparing we can call the pre-command hooks - # first, call the command local hook: - cog = self.cog - if self._before_invoke is not None: - # should be cog if @commands.before_invoke is used - instance = getattr(self._before_invoke, '__self__', cog) - # __self__ only exists for methods, not functions - # however, if @command.before_invoke is used, it will be a function - if instance: - await self._before_invoke(instance, ctx) # type: ignore - else: - await self._before_invoke(ctx) # type: ignore - - # call the cog local hook if applicable: - if cog is not None: - hook = Cog._get_overridden_method(cog.cog_before_invoke) - 
if hook is not None: - await hook(ctx) - - # call the bot global hook if necessary - hook = ctx.bot._before_invoke - if hook is not None: - await hook(ctx) - - async def call_after_hooks(self, ctx: Context[BotT], /) -> None: - cog = self.cog - if self._after_invoke is not None: - instance = getattr(self._after_invoke, '__self__', cog) - if instance: - await self._after_invoke(instance, ctx) # type: ignore - else: - await self._after_invoke(ctx) # type: ignore - - # call the cog local hook if applicable: - if cog is not None: - hook = Cog._get_overridden_method(cog.cog_after_invoke) - if hook is not None: - await hook(ctx) - - hook = ctx.bot._after_invoke - if hook is not None: - await hook(ctx) - - def _prepare_cooldowns(self, ctx: Context[BotT]) -> None: - if self._buckets.valid: - dt = ctx.message.edited_at or ctx.message.created_at - current = dt.replace(tzinfo=datetime.timezone.utc).timestamp() - bucket = self._buckets.get_bucket(ctx, current) - if bucket is not None: - retry_after = bucket.update_rate_limit(current) - if retry_after: - raise CommandOnCooldown(bucket, retry_after, self._buckets.type) # type: ignore - - async def prepare(self, ctx: Context[BotT], /) -> None: - ctx.command = self - - if not await self.can_run(ctx): - raise CheckFailure(f'The check functions for command {self.qualified_name} failed.') - - if self._max_concurrency is not None: - # For this application, context can be duck-typed as a Message - await self._max_concurrency.acquire(ctx) - - try: - if self.cooldown_after_parsing: - await self._parse_arguments(ctx) - self._prepare_cooldowns(ctx) - else: - self._prepare_cooldowns(ctx) - await self._parse_arguments(ctx) - - await self.call_before_hooks(ctx) - except: - if self._max_concurrency is not None: - await self._max_concurrency.release(ctx) - raise - - def is_on_cooldown(self, ctx: Context[BotT], /) -> bool: - """Checks whether the command is currently on cooldown. - - .. versionchanged:: 2.0 - - ``ctx`` parameter is now positional-only. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context to use when checking the commands cooldown status. - - Returns - -------- - :class:`bool` - A boolean indicating if the command is on cooldown. - """ - if not self._buckets.valid: - return False - - bucket = self._buckets.get_bucket(ctx) - if bucket is None: - return False - dt = ctx.message.edited_at or ctx.message.created_at - current = dt.replace(tzinfo=datetime.timezone.utc).timestamp() - return bucket.get_tokens(current) == 0 - - def reset_cooldown(self, ctx: Context[BotT], /) -> None: - """Resets the cooldown on this command. - - .. versionchanged:: 2.0 - - ``ctx`` parameter is now positional-only. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context to reset the cooldown under. - """ - if self._buckets.valid: - bucket = self._buckets.get_bucket(ctx) - if bucket is not None: - bucket.reset() - - def get_cooldown_retry_after(self, ctx: Context[BotT], /) -> float: - """Retrieves the amount of seconds before this command can be tried again. - - .. versionadded:: 1.4 - - .. versionchanged:: 2.0 - - ``ctx`` parameter is now positional-only. - - Parameters - ----------- - ctx: :class:`.Context` - The invocation context to retrieve the cooldown from. - - Returns - -------- - :class:`float` - The amount of time left on this command's cooldown in seconds. - If this is ``0.0`` then the command isn't on cooldown. 
- """ - if self._buckets.valid: - bucket = self._buckets.get_bucket(ctx) - if bucket is None: - return 0.0 - dt = ctx.message.edited_at or ctx.message.created_at - current = dt.replace(tzinfo=datetime.timezone.utc).timestamp() - return bucket.get_retry_after(current) - - return 0.0 - - async def invoke(self, ctx: Context[BotT], /) -> None: - await self.prepare(ctx) - - # terminate the invoked_subcommand chain. - # since we're in a regular command (and not a group) then - # the invoked subcommand is None. - ctx.invoked_subcommand = None - ctx.subcommand_passed = None - injected = hooked_wrapped_callback(self, ctx, self.callback) # type: ignore - await injected(*ctx.args, **ctx.kwargs) # type: ignore - - async def reinvoke(self, ctx: Context[BotT], /, *, call_hooks: bool = False) -> None: - ctx.command = self - await self._parse_arguments(ctx) - - if call_hooks: - await self.call_before_hooks(ctx) - - ctx.invoked_subcommand = None - try: - await self.callback(*ctx.args, **ctx.kwargs) # type: ignore - except: - ctx.command_failed = True - raise - finally: - if call_hooks: - await self.call_after_hooks(ctx) - - def error(self, coro: Error[CogT, ContextT], /) -> Error[CogT, ContextT]: - """A decorator that registers a coroutine as a local error handler. - - A local error handler is an :func:`.on_command_error` event limited to - a single command. However, the :func:`.on_command_error` is still - invoked afterwards as the catch-all. - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the local error handler. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. - """ - - if not asyncio.iscoroutinefunction(coro): - raise TypeError('The error handler must be a coroutine.') - - self.on_error: Error[CogT, Any] = coro - return coro - - def has_error_handler(self) -> bool: - """:class:`bool`: Checks whether the command has an error handler registered. - - .. versionadded:: 1.7 - """ - return hasattr(self, 'on_error') - - def before_invoke(self, coro: Hook[CogT, ContextT], /) -> Hook[CogT, ContextT]: - """A decorator that registers a coroutine as a pre-invoke hook. - - A pre-invoke hook is called directly before the command is - called. This makes it a useful function to set up database - connections or any type of set up required. - - This pre-invoke hook takes a sole parameter, a :class:`.Context`. - - See :meth:`.Bot.before_invoke` for more info. - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. - - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the pre-invoke hook. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. - """ - if not asyncio.iscoroutinefunction(coro): - raise TypeError('The pre-invoke hook must be a coroutine.') - - self._before_invoke = coro - return coro - - def after_invoke(self, coro: Hook[CogT, ContextT], /) -> Hook[CogT, ContextT]: - """A decorator that registers a coroutine as a post-invoke hook. - - A post-invoke hook is called directly after the command is - called. This makes it a useful function to clean-up database - connections or any type of clean up required. - - This post-invoke hook takes a sole parameter, a :class:`.Context`. - - See :meth:`.Bot.after_invoke` for more info. - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. 
- - Parameters - ----------- - coro: :ref:`coroutine ` - The coroutine to register as the post-invoke hook. - - Raises - ------- - TypeError - The coroutine passed is not actually a coroutine. - """ - if not asyncio.iscoroutinefunction(coro): - raise TypeError('The post-invoke hook must be a coroutine.') - - self._after_invoke = coro - return coro - - @property - def cog_name(self) -> Optional[str]: - """Optional[:class:`str`]: The name of the cog this command belongs to, if any.""" - return type(self.cog).__cog_name__ if self.cog is not None else None - - @property - def short_doc(self) -> str: - """:class:`str`: Gets the "short" documentation of a command. - - By default, this is the :attr:`.brief` attribute. - If that lookup leads to an empty string then the first line of the - :attr:`.help` attribute is used instead. - """ - if self.brief is not None: - return self.brief - if self.help is not None: - return self.help.split('\n', 1)[0] - return '' - - def _is_typing_optional(self, annotation: Union[T, Optional[T]]) -> bool: - return getattr(annotation, '__origin__', None) is Union and type(None) in annotation.__args__ # type: ignore - - @property - def signature(self) -> str: - """:class:`str`: Returns a POSIX-like signature useful for help command output.""" - if self.usage is not None: - return self.usage - - params = self.clean_params - if not params: - return '' - - result = [] - for param in params.values(): - name = param.displayed_name or param.name - - greedy = isinstance(param.converter, Greedy) - optional = False # postpone evaluation of if it's an optional argument - - annotation: Any = param.converter.converter if greedy else param.converter - origin = getattr(annotation, '__origin__', None) - if not greedy and origin is Union: - none_cls = type(None) - union_args = annotation.__args__ - optional = union_args[-1] is none_cls - if len(union_args) == 2 and optional: - annotation = union_args[0] - origin = getattr(annotation, '__origin__', None) - - if annotation is discord.Attachment: - # For discord.Attachment we need to signal to the user that it's an attachment - # It's not exactly pretty but it's enough to differentiate - if optional: - result.append(f'[{name} (upload a file)]') - elif greedy: - result.append(f'[{name} (upload files)]...') - else: - result.append(f'<{name} (upload a file)>') - continue - - # for typing.Literal[...], typing.Optional[typing.Literal[...]], and Greedy[typing.Literal[...]], the - # parameter signature is a literal list of it's values - if origin is Literal: - name = '|'.join(f'"{v}"' if isinstance(v, str) else str(v) for v in annotation.__args__) - if not param.required: - # We don't want None or '' to trigger the [name=value] case and instead it should - # do [name] since [name=None] or [name=] are not exactly useful for the user. - if param.displayed_default: - result.append( - f'[{name}={param.displayed_default}]' if not greedy else f'[{name}={param.displayed_default}]...' - ) - continue - else: - result.append(f'[{name}]') - - elif param.kind == param.VAR_POSITIONAL: - if self.require_var_positional: - result.append(f'<{name}...>') - else: - result.append(f'[{name}...]') - elif greedy: - result.append(f'[{name}]...') - elif optional: - result.append(f'[{name}]') - else: - result.append(f'<{name}>') - - return ' '.join(result) - - async def can_run(self, ctx: Context[BotT], /) -> bool: - """|coro| - - Checks if the command can be executed by checking all the predicates - inside the :attr:`~Command.checks` attribute. 
This also checks whether the - command is disabled. - - .. versionchanged:: 1.3 - Checks whether the command is disabled or not - - .. versionchanged:: 2.0 - - ``ctx`` parameter is now positional-only. - - Parameters - ----------- - ctx: :class:`.Context` - The ctx of the command currently being invoked. - - Raises - ------- - :class:`CommandError` - Any command error that was raised during a check call will be propagated - by this function. - - Returns - -------- - :class:`bool` - A boolean indicating if the command can be invoked. - """ - - if not self.enabled: - raise DisabledCommand(f'{self.name} command is disabled') - - original = ctx.command - ctx.command = self - - try: - if not await ctx.bot.can_run(ctx): - raise CheckFailure(f'The global check functions for command {self.qualified_name} failed.') - - cog = self.cog - if cog is not None: - local_check = Cog._get_overridden_method(cog.cog_check) - if local_check is not None: - ret = await discord.utils.maybe_coroutine(local_check, ctx) - if not ret: - return False - - predicates = self.checks - if not predicates: - # since we have no checks, then we just return True. - return True - - return await discord.utils.async_all(predicate(ctx) for predicate in predicates) - finally: - ctx.command = original - - -class GroupMixin(Generic[CogT]): - """A mixin that implements common functionality for classes that behave - similar to :class:`.Group` and are allowed to register commands. - - Attributes - ----------- - all_commands: :class:`dict` - A mapping of command name to :class:`.Command` - objects. - case_insensitive: :class:`bool` - Whether the commands should be case insensitive. Defaults to ``False``. - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - case_insensitive = kwargs.get('case_insensitive', False) - self.all_commands: Dict[str, Command[CogT, ..., Any]] = _CaseInsensitiveDict() if case_insensitive else {} - self.case_insensitive: bool = case_insensitive - super().__init__(*args, **kwargs) - - @property - def commands(self) -> Set[Command[CogT, ..., Any]]: - """Set[:class:`.Command`]: A unique set of commands without aliases that are registered.""" - return set(self.all_commands.values()) - - def recursively_remove_all_commands(self) -> None: - for command in self.all_commands.copy().values(): - if isinstance(command, GroupMixin): - command.recursively_remove_all_commands() - self.remove_command(command.name) - - def add_command(self, command: Command[CogT, ..., Any], /) -> None: - """Adds a :class:`.Command` into the internal list of commands. - - This is usually not called, instead the :meth:`~.GroupMixin.command` or - :meth:`~.GroupMixin.group` shortcut decorators are used instead. - - .. versionchanged:: 1.4 - Raise :exc:`.CommandRegistrationError` instead of generic :exc:`.ClientException` - - .. versionchanged:: 2.0 - - ``command`` parameter is now positional-only. - - Parameters - ----------- - command: :class:`Command` - The command to add. - - Raises - ------- - CommandRegistrationError - If the command or its alias is already registered by different command. - TypeError - If the command passed is not a subclass of :class:`.Command`. 
- """ - - if not isinstance(command, Command): - raise TypeError('The command passed must be a subclass of Command') - - if isinstance(self, Command): - command.parent = self - - if command.name in self.all_commands: - raise CommandRegistrationError(command.name) - - self.all_commands[command.name] = command - for alias in command.aliases: - if alias in self.all_commands: - self.remove_command(command.name) - raise CommandRegistrationError(alias, alias_conflict=True) - self.all_commands[alias] = command - - def remove_command(self, name: str, /) -> Optional[Command[CogT, ..., Any]]: - """Remove a :class:`.Command` from the internal list - of commands. - - This could also be used as a way to remove aliases. - - .. versionchanged:: 2.0 - - ``name`` parameter is now positional-only. - - Parameters - ----------- - name: :class:`str` - The name of the command to remove. - - Returns - -------- - Optional[:class:`.Command`] - The command that was removed. If the name is not valid then - ``None`` is returned instead. - """ - command = self.all_commands.pop(name, None) - - # does not exist - if command is None: - return None - - if name in command.aliases: - # we're removing an alias so we don't want to remove the rest - return command - - # we're not removing the alias so let's delete the rest of them. - for alias in command.aliases: - cmd = self.all_commands.pop(alias, None) - # in the case of a CommandRegistrationError, an alias might conflict - # with an already existing command. If this is the case, we want to - # make sure the pre-existing command is not removed. - if cmd is not None and cmd != command: - self.all_commands[alias] = cmd - return command - - def walk_commands(self) -> Generator[Command[CogT, ..., Any], None, None]: - """An iterator that recursively walks through all commands and subcommands. - - .. versionchanged:: 1.4 - Duplicates due to aliases are no longer returned - - Yields - ------ - Union[:class:`.Command`, :class:`.Group`] - A command or group from the internal list of commands. - """ - for command in self.commands: - yield command - if isinstance(command, GroupMixin): - yield from command.walk_commands() - - def get_command(self, name: str, /) -> Optional[Command[CogT, ..., Any]]: - """Get a :class:`.Command` from the internal list - of commands. - - This could also be used as a way to get aliases. - - The name could be fully qualified (e.g. ``'foo bar'``) will get - the subcommand ``bar`` of the group command ``foo``. If a - subcommand is not found then ``None`` is returned just as usual. - - .. versionchanged:: 2.0 - - ``name`` parameter is now positional-only. - - Parameters - ----------- - name: :class:`str` - The name of the command to get. - - Returns - -------- - Optional[:class:`Command`] - The command that was requested. If not found, returns ``None``. - """ - - # fast path, no space in name. - if ' ' not in name: - return self.all_commands.get(name) - - names = name.split() - if not names: - return None - obj = self.all_commands.get(names[0]) - if not isinstance(obj, GroupMixin): - return obj - - for name in names[1:]: - try: - obj = obj.all_commands[name] # type: ignore - except (AttributeError, KeyError): - return None - - return obj - - @overload - def command( - self: GroupMixin[CogT], - name: str = ..., - *args: Any, - **kwargs: Any, - ) -> Callable[ - [ - Union[ - Callable[Concatenate[CogT, ContextT, P], Coro[T]], - Callable[Concatenate[ContextT, P], Coro[T]], - ] - ], - Command[CogT, P, T], - ]: - ... 
- - @overload - def command( - self: GroupMixin[CogT], - name: str = ..., - cls: Type[CommandT] = ..., # type: ignore # previous overload handles case where cls is not set - *args: Any, - **kwargs: Any, - ) -> Callable[ - [ - Union[ - Callable[Concatenate[CogT, ContextT, P], Coro[T]], - Callable[Concatenate[ContextT, P], Coro[T]], - ] - ], - CommandT, - ]: - ... - - def command( - self, - name: str = MISSING, - cls: Type[Command[Any, ..., Any]] = MISSING, - *args: Any, - **kwargs: Any, - ) -> Any: - """A shortcut decorator that invokes :func:`~discord.ext.commands.command` and adds it to - the internal command list via :meth:`~.GroupMixin.add_command`. - - Returns - -------- - Callable[..., :class:`Command`] - A decorator that converts the provided method into a Command, adds it to the bot, then returns it. - """ - - def decorator(func): - - kwargs.setdefault('parent', self) - result = command(name=name, cls=cls, *args, **kwargs)(func) - self.add_command(result) - return result - - return decorator - - @overload - def group( - self: GroupMixin[CogT], - name: str = ..., - *args: Any, - **kwargs: Any, - ) -> Callable[ - [ - Union[ - Callable[Concatenate[CogT, ContextT, P], Coro[T]], - Callable[Concatenate[ContextT, P], Coro[T]], - ] - ], - Group[CogT, P, T], - ]: - ... - - @overload - def group( - self: GroupMixin[CogT], - name: str = ..., - cls: Type[GroupT] = ..., # type: ignore # previous overload handles case where cls is not set - *args: Any, - **kwargs: Any, - ) -> Callable[ - [ - Union[ - Callable[Concatenate[CogT, ContextT, P], Coro[T]], - Callable[Concatenate[ContextT, P], Coro[T]], - ] - ], - GroupT, - ]: - ... - - def group( - self, - name: str = MISSING, - cls: Type[Group[Any, ..., Any]] = MISSING, - *args: Any, - **kwargs: Any, - ) -> Any: - """A shortcut decorator that invokes :func:`.group` and adds it to - the internal command list via :meth:`~.GroupMixin.add_command`. - - Returns - -------- - Callable[..., :class:`Group`] - A decorator that converts the provided method into a Group, adds it to the bot, then returns it. - """ - - def decorator(func): - kwargs.setdefault('parent', self) - result = group(name=name, cls=cls, *args, **kwargs)(func) - self.add_command(result) - return result - - return decorator - - -class Group(GroupMixin[CogT], Command[CogT, P, T]): - """A class that implements a grouping protocol for commands to be - executed as subcommands. - - This class is a subclass of :class:`.Command` and thus all options - valid in :class:`.Command` are valid in here as well. - - Attributes - ----------- - invoke_without_command: :class:`bool` - Indicates if the group callback should begin parsing and - invocation only if no subcommand was found. Useful for - making it an error handling function to tell the user that - no subcommand was found or to have different functionality - in case no subcommand was found. If this is ``False``, then - the group callback will always be invoked first. This means - that the checks and the parsing dictated by its parameters - will be executed. Defaults to ``False``. - case_insensitive: :class:`bool` - Indicates if the group's commands should be case insensitive. - Defaults to ``False``. - """ - - def __init__(self, *args: Any, **attrs: Any) -> None: - self.invoke_without_command: bool = attrs.pop('invoke_without_command', False) - super().__init__(*args, **attrs) - - def copy(self) -> Self: - """Creates a copy of this :class:`Group`. - - Returns - -------- - :class:`Group` - A new instance of this group. 
- """ - ret = super().copy() - for cmd in self.commands: - ret.add_command(cmd.copy()) - return ret - - async def invoke(self, ctx: Context[BotT], /) -> None: - ctx.invoked_subcommand = None - ctx.subcommand_passed = None - early_invoke = not self.invoke_without_command - if early_invoke: - await self.prepare(ctx) - - view = ctx.view - previous = view.index - view.skip_ws() - trigger = view.get_word() - - if trigger: - ctx.subcommand_passed = trigger - ctx.invoked_subcommand = self.all_commands.get(trigger, None) - - if early_invoke: - injected = hooked_wrapped_callback(self, ctx, self.callback) # type: ignore - await injected(*ctx.args, **ctx.kwargs) # type: ignore - - ctx.invoked_parents.append(ctx.invoked_with) # type: ignore - - if trigger and ctx.invoked_subcommand: - ctx.invoked_with = trigger - await ctx.invoked_subcommand.invoke(ctx) - elif not early_invoke: - # undo the trigger parsing - view.index = previous - view.previous = previous - await super().invoke(ctx) - - async def reinvoke(self, ctx: Context[BotT], /, *, call_hooks: bool = False) -> None: - ctx.invoked_subcommand = None - early_invoke = not self.invoke_without_command - if early_invoke: - ctx.command = self - await self._parse_arguments(ctx) - - if call_hooks: - await self.call_before_hooks(ctx) - - view = ctx.view - previous = view.index - view.skip_ws() - trigger = view.get_word() - - if trigger: - ctx.subcommand_passed = trigger - ctx.invoked_subcommand = self.all_commands.get(trigger, None) - - if early_invoke: - try: - await self.callback(*ctx.args, **ctx.kwargs) # type: ignore - except: - ctx.command_failed = True - raise - finally: - if call_hooks: - await self.call_after_hooks(ctx) - - ctx.invoked_parents.append(ctx.invoked_with) # type: ignore - - if trigger and ctx.invoked_subcommand: - ctx.invoked_with = trigger - await ctx.invoked_subcommand.reinvoke(ctx, call_hooks=call_hooks) - elif not early_invoke: - # undo the trigger parsing - view.index = previous - view.previous = previous - await super().reinvoke(ctx, call_hooks=call_hooks) - - -# Decorators - -if TYPE_CHECKING: - # Using a class to emulate a function allows for overloading the inner function in the decorator. - - class _CommandDecorator: - @overload - def __call__(self, func: Callable[Concatenate[CogT, ContextT, P], Coro[T]], /) -> Command[CogT, P, T]: - ... - - @overload - def __call__(self, func: Callable[Concatenate[ContextT, P], Coro[T]], /) -> Command[None, P, T]: - ... - - def __call__(self, func: Callable[..., Coro[T]], /) -> Any: - ... - - class _GroupDecorator: - @overload - def __call__(self, func: Callable[Concatenate[CogT, ContextT, P], Coro[T]], /) -> Group[CogT, P, T]: - ... - - @overload - def __call__(self, func: Callable[Concatenate[ContextT, P], Coro[T]], /) -> Group[None, P, T]: - ... - - def __call__(self, func: Callable[..., Coro[T]], /) -> Any: - ... - - -@overload -def command( - name: str = ..., - **attrs: Any, -) -> _CommandDecorator: - ... - - -@overload -def command( - name: str = ..., - cls: Type[CommandT] = ..., # type: ignore # previous overload handles case where cls is not set - **attrs: Any, -) -> Callable[ - [ - Union[ - Callable[Concatenate[ContextT, P], Coro[Any]], - Callable[Concatenate[CogT, ContextT, P], Coro[Any]], # type: ignore # CogT is used here to allow covariance - ] - ], - CommandT, -]: - ... 
- - -def command( - name: str = MISSING, - cls: Type[Command[Any, ..., Any]] = MISSING, - **attrs: Any, -) -> Any: - """A decorator that transforms a function into a :class:`.Command` - or if called with :func:`.group`, :class:`.Group`. - - By default the ``help`` attribute is received automatically from the - docstring of the function and is cleaned up with the use of - ``inspect.cleandoc``. If the docstring is ``bytes``, then it is decoded - into :class:`str` using utf-8 encoding. - - All checks added using the :func:`.check` & co. decorators are added into - the function. There is no way to supply your own checks through this - decorator. - - Parameters - ----------- - name: :class:`str` - The name to create the command with. By default this uses the - function name unchanged. - cls - The class to construct with. By default this is :class:`.Command`. - You usually do not change this. - attrs - Keyword arguments to pass into the construction of the class denoted - by ``cls``. - - Raises - ------- - TypeError - If the function is not a coroutine or is already a command. - """ - if cls is MISSING: - cls = Command - - def decorator(func): - if isinstance(func, Command): - raise TypeError('Callback is already a command.') - return cls(func, name=name, **attrs) - - return decorator - - -@overload -def group( - name: str = ..., - **attrs: Any, -) -> _GroupDecorator: - ... - - -@overload -def group( - name: str = ..., - cls: Type[GroupT] = ..., # type: ignore # previous overload handles case where cls is not set - **attrs: Any, -) -> Callable[ - [ - Union[ - Callable[Concatenate[CogT, ContextT, P], Coro[Any]], # type: ignore # CogT is used here to allow covariance - Callable[Concatenate[ContextT, P], Coro[Any]], - ] - ], - GroupT, -]: - ... - - -def group( - name: str = MISSING, - cls: Type[Group[Any, ..., Any]] = MISSING, - **attrs: Any, -) -> Any: - """A decorator that transforms a function into a :class:`.Group`. - - This is similar to the :func:`~discord.ext.commands.command` decorator but the ``cls`` - parameter is set to :class:`Group` by default. - - .. versionchanged:: 1.1 - The ``cls`` parameter can now be passed. - """ - if cls is MISSING: - cls = Group - - return command(name=name, cls=cls, **attrs) - - -def check(predicate: UserCheck[ContextT], /) -> Check[ContextT]: - r"""A decorator that adds a check to the :class:`.Command` or its - subclasses. These checks could be accessed via :attr:`.Command.checks`. - - These checks should be predicates that take in a single parameter taking - a :class:`.Context`. If the check returns a ``False``\-like value then - during invocation a :exc:`.CheckFailure` exception is raised and sent to - the :func:`.on_command_error` event. - - If an exception should be thrown in the predicate then it should be a - subclass of :exc:`.CommandError`. Any exception not subclassed from it - will be propagated while those subclassed will be sent to - :func:`.on_command_error`. - - A special attribute named ``predicate`` is bound to the value - returned by this decorator to retrieve the predicate passed to the - decorator. This allows the following introspection and chaining to be done: - - .. code-block:: python3 - - def owner_or_permissions(**perms): - original = commands.has_permissions(**perms).predicate - async def extended_check(ctx): - if ctx.guild is None: - return False - return ctx.guild.owner_id == ctx.author.id or await original(ctx) - return commands.check(extended_check) - - .. 
note:: - - The function returned by ``predicate`` is **always** a coroutine, - even if the original function was not a coroutine. - - .. versionchanged:: 1.3 - The ``predicate`` attribute was added. - - Examples - --------- - - Creating a basic check to see if the command invoker is you. - - .. code-block:: python3 - - def check_if_it_is_me(ctx): - return ctx.message.author.id == 85309593344815104 - - @bot.command() - @commands.check(check_if_it_is_me) - async def only_for_me(ctx): - await ctx.send('I know you!') - - Transforming common checks into its own decorator: - - .. code-block:: python3 - - def is_me(): - def predicate(ctx): - return ctx.message.author.id == 85309593344815104 - return commands.check(predicate) - - @bot.command() - @is_me() - async def only_me(ctx): - await ctx.send('Only you!') - - .. versionchanged:: 2.0 - - ``predicate`` parameter is now positional-only. - - Parameters - ----------- - predicate: Callable[[:class:`Context`], :class:`bool`] - The predicate to check if the command should be invoked. - """ - - def decorator(func: Union[Command[Any, ..., Any], CoroFunc]) -> Union[Command[Any, ..., Any], CoroFunc]: - if isinstance(func, Command): - func.checks.append(predicate) # type: ignore - else: - if not hasattr(func, '__commands_checks__'): - func.__commands_checks__ = [] - - func.__commands_checks__.append(predicate) - - return func - - if inspect.iscoroutinefunction(predicate): - decorator.predicate = predicate - else: - - @functools.wraps(predicate) - async def wrapper(ctx: ContextT): - return predicate(ctx) - - decorator.predicate = wrapper - - return decorator # type: ignore - - -def check_any(*checks: Check[ContextT]) -> Check[ContextT]: - r"""A :func:`check` that is added that checks if any of the checks passed - will pass, i.e. using logical OR. - - If all checks fail then :exc:`.CheckAnyFailure` is raised to signal the failure. - It inherits from :exc:`.CheckFailure`. - - .. note:: - - The ``predicate`` attribute for this function **is** a coroutine. - - .. versionadded:: 1.3 - - Parameters - ------------ - \*checks: Callable[[:class:`Context`], :class:`bool`] - An argument list of checks that have been decorated with - the :func:`check` decorator. - - Raises - ------- - TypeError - A check passed has not been decorated with the :func:`check` - decorator. - - Examples - --------- - - Creating a basic check to see if it's the bot owner or - the server owner: - - .. code-block:: python3 - - def is_guild_owner(): - def predicate(ctx): - return ctx.guild is not None and ctx.guild.owner_id == ctx.author.id - return commands.check(predicate) - - @bot.command() - @commands.check_any(commands.is_owner(), is_guild_owner()) - async def only_for_owners(ctx): - await ctx.send('Hello mister owner!') - """ - - unwrapped = [] - for wrapped in checks: - try: - pred = wrapped.predicate - except AttributeError: - raise TypeError(f'{wrapped!r} must be wrapped by commands.check decorator') from None - else: - unwrapped.append(pred) - - async def predicate(ctx: Context[BotT]) -> bool: - errors = [] - for func in unwrapped: - try: - value = await func(ctx) - except CheckFailure as e: - errors.append(e) - else: - if value: - return True - # if we're here, all checks failed - raise CheckAnyFailure(unwrapped, errors) - - return check(predicate) # type: ignore - - -def has_role(item: Union[int, str], /) -> Check[Any]: - """A :func:`.check` that is added that checks if the member invoking the - command has the role specified via the name or ID specified. 
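For the :func:`command` and :func:`group` factories documented a little earlier, which carry no usage example of their own, a short hedged sketch (the name, aliases and help text are made up):

.. code-block:: python3

    @bot.command(name='ping', aliases=['latency'], help='Shows the gateway latency.')
    async def ping(ctx):
        # ``cls`` defaults to Command; extra keyword arguments are forwarded to it.
        await ctx.send(f'Pong! {round(bot.latency * 1000)}ms')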
- - If a string is specified, you must give the exact name of the role, including - caps and spelling. - - If an integer is specified, you must give the exact snowflake ID of the role. - - If the message is invoked in a private message context then the check will - return ``False``. - - This check raises one of two special exceptions, :exc:`.MissingRole` if the user - is missing a role, or :exc:`.NoPrivateMessage` if it is used in a private message. - Both inherit from :exc:`.CheckFailure`. - - .. versionchanged:: 1.1 - - Raise :exc:`.MissingRole` or :exc:`.NoPrivateMessage` - instead of generic :exc:`.CheckFailure` - - .. versionchanged:: 2.0 - - ``item`` parameter is now positional-only. - - Parameters - ----------- - item: Union[:class:`int`, :class:`str`] - The name or ID of the role to check. - """ - - def predicate(ctx: Context[BotT]) -> bool: - if ctx.guild is None: - raise NoPrivateMessage() - - # ctx.guild is None doesn't narrow ctx.author to Member - if isinstance(item, int): - role = ctx.author.get_role(item) # type: ignore - else: - role = discord.utils.get(ctx.author.roles, name=item) # type: ignore - if role is None: - raise MissingRole(item) - return True - - return check(predicate) - - -def has_any_role(*items: Union[int, str]) -> Callable[[T], T]: - r"""A :func:`.check` that is added that checks if the member invoking the - command has **any** of the roles specified. This means that if they have - one out of the three roles specified, then this check will return ``True``. - - Similar to :func:`.has_role`\, the names or IDs passed in must be exact. - - This check raises one of two special exceptions, :exc:`.MissingAnyRole` if the user - is missing all roles, or :exc:`.NoPrivateMessage` if it is used in a private message. - Both inherit from :exc:`.CheckFailure`. - - .. versionchanged:: 1.1 - - Raise :exc:`.MissingAnyRole` or :exc:`.NoPrivateMessage` - instead of generic :exc:`.CheckFailure` - - Parameters - ----------- - items: List[Union[:class:`str`, :class:`int`]] - An argument list of names or IDs to check that the member has roles wise. - - Example - -------- - - .. code-block:: python3 - - @bot.command() - @commands.has_any_role('Library Devs', 'Moderators', 492212595072434186) - async def cool(ctx): - await ctx.send('You are cool indeed') - """ - - def predicate(ctx): - if ctx.guild is None: - raise NoPrivateMessage() - - # ctx.guild is None doesn't narrow ctx.author to Member - if any( - ctx.author.get_role(item) is not None - if isinstance(item, int) - else discord.utils.get(ctx.author.roles, name=item) is not None - for item in items - ): - return True - raise MissingAnyRole(list(items)) - - return check(predicate) - - -def bot_has_role(item: int, /) -> Callable[[T], T]: - """Similar to :func:`.has_role` except checks if the bot itself has the - role. - - This check raises one of two special exceptions, :exc:`.BotMissingRole` if the bot - is missing the role, or :exc:`.NoPrivateMessage` if it is used in a private message. - Both inherit from :exc:`.CheckFailure`. - - .. versionchanged:: 1.1 - - Raise :exc:`.BotMissingRole` or :exc:`.NoPrivateMessage` - instead of generic :exc:`.CheckFailure` - - .. versionchanged:: 2.0 - - ``item`` parameter is now positional-only. 
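A brief illustration of the role checks above (:func:`has_role`, :func:`has_any_role` and the ``bot_has_*`` variants); the role name, the role ID and the command name are placeholders:

.. code-block:: python3

    @bot.command()
    @commands.has_role('Moderators')              # invoker needs this exact role name
    @commands.bot_has_role(492212595072434186)    # the bot itself needs this role ID
    async def cleanup(ctx):
        await ctx.send('Both role checks passed.')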
- """ - - def predicate(ctx): - if ctx.guild is None: - raise NoPrivateMessage() - - if isinstance(item, int): - role = ctx.me.get_role(item) - else: - role = discord.utils.get(ctx.me.roles, name=item) - if role is None: - raise BotMissingRole(item) - return True - - return check(predicate) - - -def bot_has_any_role(*items: int) -> Callable[[T], T]: - """Similar to :func:`.has_any_role` except checks if the bot itself has - any of the roles listed. - - This check raises one of two special exceptions, :exc:`.BotMissingAnyRole` if the bot - is missing all roles, or :exc:`.NoPrivateMessage` if it is used in a private message. - Both inherit from :exc:`.CheckFailure`. - - .. versionchanged:: 1.1 - - Raise :exc:`.BotMissingAnyRole` or :exc:`.NoPrivateMessage` - instead of generic checkfailure - """ - - def predicate(ctx): - if ctx.guild is None: - raise NoPrivateMessage() - - me = ctx.me - if any( - me.get_role(item) is not None if isinstance(item, int) else discord.utils.get(me.roles, name=item) is not None - for item in items - ): - return True - raise BotMissingAnyRole(list(items)) - - return check(predicate) - - -def has_permissions(**perms: bool) -> Check[Any]: - """A :func:`.check` that is added that checks if the member has all of - the permissions necessary. - - Note that this check operates on the current channel permissions, not the - guild wide permissions. - - The permissions passed in must be exactly like the properties shown under - :class:`.discord.Permissions`. - - This check raises a special exception, :exc:`.MissingPermissions` - that is inherited from :exc:`.CheckFailure`. - - Parameters - ------------ - perms - An argument list of permissions to check for. - - Example - --------- - - .. code-block:: python3 - - @bot.command() - @commands.has_permissions(manage_messages=True) - async def test(ctx): - await ctx.send('You can manage messages.') - - """ - - invalid = set(perms) - set(discord.Permissions.VALID_FLAGS) - if invalid: - raise TypeError(f"Invalid permission(s): {', '.join(invalid)}") - - def predicate(ctx: Context[BotT]) -> bool: - permissions = ctx.permissions - - missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value] - - if not missing: - return True - - raise MissingPermissions(missing) - - return check(predicate) - - -def bot_has_permissions(**perms: bool) -> Check[Any]: - """Similar to :func:`.has_permissions` except checks if the bot itself has - the permissions listed. - - This check raises a special exception, :exc:`.BotMissingPermissions` - that is inherited from :exc:`.CheckFailure`. - """ - - invalid = set(perms) - set(discord.Permissions.VALID_FLAGS) - if invalid: - raise TypeError(f"Invalid permission(s): {', '.join(invalid)}") - - def predicate(ctx: Context[BotT]) -> bool: - permissions = ctx.bot_permissions - - missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value] - - if not missing: - return True - - raise BotMissingPermissions(missing) - - return check(predicate) - - -def has_guild_permissions(**perms: bool) -> Check[Any]: - """Similar to :func:`.has_permissions`, but operates on guild wide - permissions instead of the current channel permissions. - - If this check is called in a DM context, it will raise an - exception, :exc:`.NoPrivateMessage`. - - .. 
versionadded:: 1.3 - """ - - invalid = set(perms) - set(discord.Permissions.VALID_FLAGS) - if invalid: - raise TypeError(f"Invalid permission(s): {', '.join(invalid)}") - - def predicate(ctx: Context[BotT]) -> bool: - if not ctx.guild: - raise NoPrivateMessage - - permissions = ctx.author.guild_permissions # type: ignore - missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value] - - if not missing: - return True - - raise MissingPermissions(missing) - - return check(predicate) - - -def bot_has_guild_permissions(**perms: bool) -> Check[Any]: - """Similar to :func:`.has_guild_permissions`, but checks the bot - members guild permissions. - - .. versionadded:: 1.3 - """ - - invalid = set(perms) - set(discord.Permissions.VALID_FLAGS) - if invalid: - raise TypeError(f"Invalid permission(s): {', '.join(invalid)}") - - def predicate(ctx: Context[BotT]) -> bool: - if not ctx.guild: - raise NoPrivateMessage - - permissions = ctx.me.guild_permissions # type: ignore - missing = [perm for perm, value in perms.items() if getattr(permissions, perm) != value] - - if not missing: - return True - - raise BotMissingPermissions(missing) - - return check(predicate) - - -def dm_only() -> Check[Any]: - """A :func:`.check` that indicates this command must only be used in a - DM context. Only private messages are allowed when - using the command. - - This check raises a special exception, :exc:`.PrivateMessageOnly` - that is inherited from :exc:`.CheckFailure`. - - .. versionadded:: 1.1 - """ - - def predicate(ctx: Context[BotT]) -> bool: - if ctx.guild is not None: - raise PrivateMessageOnly() - return True - - return check(predicate) - - -def guild_only() -> Check[Any]: - """A :func:`.check` that indicates this command must only be used in a - guild context only. Basically, no private messages are allowed when - using the command. - - This check raises a special exception, :exc:`.NoPrivateMessage` - that is inherited from :exc:`.CheckFailure`. - - If used on hybrid commands, this will be equivalent to the - :func:`discord.app_commands.guild_only` decorator. In an unsupported - context, such as a subcommand, this will still fallback to applying the - check. - """ - - # Due to implementation quirks, this check has to be re-implemented completely - # to work with both app_commands and the command framework. - - def predicate(ctx: Context[BotT]) -> bool: - if ctx.guild is None: - raise NoPrivateMessage() - return True - - def decorator(func: Union[Command, CoroFunc]) -> Union[Command, CoroFunc]: - if isinstance(func, Command): - func.checks.append(predicate) - if hasattr(func, '__commands_is_hybrid__'): - app_command = getattr(func, 'app_command', None) - if app_command: - app_command.guild_only = True - else: - if not hasattr(func, '__commands_checks__'): - func.__commands_checks__ = [] - - func.__commands_checks__.append(predicate) - func.__discord_app_commands_guild_only__ = True - - return func - - if inspect.iscoroutinefunction(predicate): - decorator.predicate = predicate - else: - - @functools.wraps(predicate) - async def wrapper(ctx: Context[BotT]): - return predicate(ctx) - - decorator.predicate = wrapper - - return decorator # type: ignore - - -def is_owner() -> Check[Any]: - """A :func:`.check` that checks if the person invoking this command is the - owner of the bot. - - This is powered by :meth:`.Bot.is_owner`. - - This check raises a special exception, :exc:`.NotOwner` that is derived - from :exc:`.CheckFailure`. 
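The guild/DM scope checks above compose like any other check; a hedged sketch combining them (the command names and the chosen permissions are assumptions for the example):

.. code-block:: python3

    @bot.command()
    @commands.guild_only()
    @commands.has_guild_permissions(manage_guild=True)
    async def settings(ctx):
        await ctx.send('Guild-wide permission check passed.')

    @bot.command()
    @commands.dm_only()
    @commands.is_owner()
    async def shutdown(ctx):
        await ctx.send('Shutting down.')
        await bot.close()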
- """ - - async def predicate(ctx: Context[BotT]) -> bool: - if not await ctx.bot.is_owner(ctx.author): - raise NotOwner('You do not own this bot.') - return True - - return check(predicate) - - -def is_nsfw() -> Check[Any]: - """A :func:`.check` that checks if the channel is a NSFW channel. - - This check raises a special exception, :exc:`.NSFWChannelRequired` - that is derived from :exc:`.CheckFailure`. - - If used on hybrid commands, this will be equivalent to setting the - application command's ``nsfw`` attribute to ``True``. In an unsupported - context, such as a subcommand, this will still fallback to applying the - check. - - .. versionchanged:: 1.1 - - Raise :exc:`.NSFWChannelRequired` instead of generic :exc:`.CheckFailure`. - DM channels will also now pass this check. - """ - - # Due to implementation quirks, this check has to be re-implemented completely - # to work with both app_commands and the command framework. - - def predicate(ctx: Context[BotT]) -> bool: - ch = ctx.channel - if ctx.guild is None or ( - isinstance(ch, (discord.TextChannel, discord.Thread, discord.VoiceChannel)) and ch.is_nsfw() - ): - return True - raise NSFWChannelRequired(ch) # type: ignore - - def decorator(func: Union[Command, CoroFunc]) -> Union[Command, CoroFunc]: - if isinstance(func, Command): - func.checks.append(predicate) - if hasattr(func, '__commands_is_hybrid__'): - app_command = getattr(func, 'app_command', None) - if app_command: - app_command.nsfw = True - else: - if not hasattr(func, '__commands_checks__'): - func.__commands_checks__ = [] - - func.__commands_checks__.append(predicate) - func.__discord_app_commands_is_nsfw__ = True - - return func - - if inspect.iscoroutinefunction(predicate): - decorator.predicate = predicate - else: - - @functools.wraps(predicate) - async def wrapper(ctx: Context[BotT]): - return predicate(ctx) - - decorator.predicate = wrapper - - return decorator # type: ignore - - -def cooldown( - rate: int, - per: float, - type: Union[BucketType, Callable[[Context[Any]], Any]] = BucketType.default, -) -> Callable[[T], T]: - """A decorator that adds a cooldown to a :class:`.Command` - - A cooldown allows a command to only be used a specific amount - of times in a specific time frame. These cooldowns can be based - either on a per-guild, per-channel, per-user, per-role or global basis. - Denoted by the third argument of ``type`` which must be of enum - type :class:`.BucketType`. - - If a cooldown is triggered, then :exc:`.CommandOnCooldown` is triggered in - :func:`.on_command_error` and the local error handler. - - A command can only have a single cooldown. - - Parameters - ------------ - rate: :class:`int` - The number of times a command can be used before triggering a cooldown. - per: :class:`float` - The amount of seconds to wait for a cooldown when it's been triggered. - type: Union[:class:`.BucketType`, Callable[[:class:`.Context`], Any]] - The type of cooldown to have. If callable, should return a key for the mapping. - - .. versionchanged:: 1.7 - Callables are now supported for custom bucket types. - - .. versionchanged:: 2.0 - When passing a callable, it now needs to accept :class:`.Context` - rather than :class:`~discord.Message` as its only argument. 
- """ - - def decorator(func: Union[Command, CoroFunc]) -> Union[Command, CoroFunc]: - if isinstance(func, Command): - func._buckets = CooldownMapping(Cooldown(rate, per), type) - else: - func.__commands_cooldown__ = CooldownMapping(Cooldown(rate, per), type) - return func - - return decorator # type: ignore - - -def dynamic_cooldown( - cooldown: Callable[[Context[Any]], Optional[Cooldown]], - type: Union[BucketType, Callable[[Context[Any]], Any]], -) -> Callable[[T], T]: - """A decorator that adds a dynamic cooldown to a :class:`.Command` - - This differs from :func:`.cooldown` in that it takes a function that - accepts a single parameter of type :class:`.Context` and must - return a :class:`~discord.app_commands.Cooldown` or ``None``. - If ``None`` is returned then that cooldown is effectively bypassed. - - A cooldown allows a command to only be used a specific amount - of times in a specific time frame. These cooldowns can be based - either on a per-guild, per-channel, per-user, per-role or global basis. - Denoted by the third argument of ``type`` which must be of enum - type :class:`.BucketType`. - - If a cooldown is triggered, then :exc:`.CommandOnCooldown` is triggered in - :func:`.on_command_error` and the local error handler. - - A command can only have a single cooldown. - - .. versionadded:: 2.0 - - Parameters - ------------ - cooldown: Callable[[:class:`.Context`], Optional[:class:`~discord.app_commands.Cooldown`]] - A function that takes a message and returns a cooldown that will - apply to this invocation or ``None`` if the cooldown should be bypassed. - type: :class:`.BucketType` - The type of cooldown to have. - """ - if not callable(cooldown): - raise TypeError("A callable must be provided") - - if type is BucketType.default: - raise ValueError('BucketType.default cannot be used in dynamic cooldowns') - - def decorator(func: Union[Command, CoroFunc]) -> Union[Command, CoroFunc]: - if isinstance(func, Command): - func._buckets = DynamicCooldownMapping(cooldown, type) - else: - func.__commands_cooldown__ = DynamicCooldownMapping(cooldown, type) - return func - - return decorator # type: ignore - - -def max_concurrency(number: int, per: BucketType = BucketType.default, *, wait: bool = False) -> Callable[[T], T]: - """A decorator that adds a maximum concurrency to a :class:`.Command` or its subclasses. - - This enables you to only allow a certain number of command invocations at the same time, - for example if a command takes too long or if only one user can use it at a time. This - differs from a cooldown in that there is no set waiting period or token bucket -- only - a set number of people can run the command. - - .. versionadded:: 1.3 - - Parameters - ------------- - number: :class:`int` - The maximum number of invocations of this command that can be running at the same time. - per: :class:`.BucketType` - The bucket that this concurrency is based on, e.g. ``BucketType.guild`` would allow - it to be used up to ``number`` times per guild. - wait: :class:`bool` - Whether the command should wait for the queue to be over. If this is set to ``False`` - then instead of waiting until the command can run again, the command raises - :exc:`.MaxConcurrencyReached` to its error handler. If this is set to ``True`` - then the command waits until it can be executed. 
- """ - - def decorator(func: Union[Command, CoroFunc]) -> Union[Command, CoroFunc]: - value = MaxConcurrency(number, per=per, wait=wait) - if isinstance(func, Command): - func._max_concurrency = value - else: - func.__commands_max_concurrency__ = value - return func - - return decorator # type: ignore - - -def before_invoke(coro: Hook[CogT, ContextT], /) -> Callable[[T], T]: - """A decorator that registers a coroutine as a pre-invoke hook. - - This allows you to refer to one before invoke hook for several commands that - do not have to be within the same cog. - - .. versionadded:: 1.4 - - .. versionchanged:: 2.0 - - ``coro`` parameter is now positional-only. - - Example - --------- - - .. code-block:: python3 - - async def record_usage(ctx): - print(ctx.author, 'used', ctx.command, 'at', ctx.message.created_at) - - @bot.command() - @commands.before_invoke(record_usage) - async def who(ctx): # Output: used who at