diff --git a/news/6293.vendor.rst b/news/6293.vendor.rst new file mode 100644 index 0000000000..afdf175bec --- /dev/null +++ b/news/6293.vendor.rst @@ -0,0 +1,8 @@ +Vendoring +--------- +* Update vendored dependencies: + - importlib-metadata from 8.4.0 to 8.5.0 + - packaging from 24.0 to 24.1 + - tomli from 2.0.1 to 2.0.2 + - tomlkit from 0.12.4 to 0.13.2 + - zipp from 3.18.1 to 3.20.2 diff --git a/pipenv/vendor/importlib_metadata/__init__.py b/pipenv/vendor/importlib_metadata/__init__.py index a3fd94ecff..1e5961a02f 100644 --- a/pipenv/vendor/importlib_metadata/__init__.py +++ b/pipenv/vendor/importlib_metadata/__init__.py @@ -1,23 +1,34 @@ +""" +APIs exposing metadata from third-party Python packages. + +This codebase is shared between importlib.metadata in the stdlib +and importlib_metadata in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" + from __future__ import annotations -import os -import re import abc -import sys -import json -import pipenv.vendor.zipp as zipp +import collections import email -import types -import pathlib -import operator -import textwrap import functools import itertools +import operator +import os +import pathlib import posixpath -import collections +import re +import sys +import textwrap +import types +from contextlib import suppress +from importlib import import_module +from importlib.abc import MetaPathFinder +from itertools import starmap +from typing import Any, Iterable, List, Mapping, Match, Optional, Set, cast from . import _meta -from .compat import py39, py311 from ._collections import FreezableDefaultDict, Pair from ._compat import ( NullFinder, @@ -26,12 +37,7 @@ from ._functools import method_cache, pass_none from ._itertools import always_iterable, bucket, unique_everseen from ._meta import PackageMetadata, SimplePath - -from contextlib import suppress -from importlib import import_module -from importlib.abc import MetaPathFinder -from itertools import starmap -from typing import Any, Iterable, List, Mapping, Match, Optional, Set, cast +from .compat import py39, py311 __all__ = [ 'Distribution', @@ -57,7 +63,7 @@ def __str__(self) -> str: return f"No package metadata was found for {self.name}" @property - def name(self) -> str: # type: ignore[override] + def name(self) -> str: # type: ignore[override] # make readonly (name,) = self.args return name @@ -275,7 +281,7 @@ class EntryPoints(tuple): __slots__ = () - def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] + def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] # Work with str instead of int """ Get the EntryPoint in self matching name. 
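# Illustrative sketch of the deferred-import pattern used by the
# importlib_metadata changes above (json and zipp are now imported inside
# the functions that need them, keeping package import cheap). The helper
# name below is hypothetical, not part of the vendored code.
def _load_json_file(path):
    # Deferring the import avoids paying for json at module import time
    # when this helper is never called.
    import json

    with open(path, encoding="utf-8") as f:
        return json.load(f)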
""" @@ -331,7 +337,7 @@ class PackagePath(pathlib.PurePosixPath): size: int dist: Distribution - def read_text(self, encoding: str = 'utf-8') -> str: # type: ignore[override] + def read_text(self, encoding: str = 'utf-8') -> str: return self.locate().read_text(encoding=encoding) def read_binary(self) -> bytes: @@ -666,6 +672,9 @@ def origin(self): return self._load_json('direct_url.json') def _load_json(self, filename): + # Deferred for performance (python/importlib_metadata#503) + import json + return pass_none(json.loads)( self.read_text(filename), object_hook=lambda data: types.SimpleNamespace(**data), @@ -750,7 +759,7 @@ class FastPath: True """ - @functools.lru_cache() # type: ignore + @functools.lru_cache() # type: ignore[misc] def __new__(cls, root): return super().__new__(cls) @@ -768,7 +777,10 @@ def children(self): return [] def zip_children(self): - zip_path = zipp.Path(self.root) + # deferred for performance (python/importlib_metadata#502) + from pipenv.vendor.zipp.compat.overlay import zipfile + + zip_path = zipfile.Path(self.root) names = zip_path.root.namelist() self.joinpath = zip_path.joinpath @@ -1108,7 +1120,7 @@ def _get_toplevel_name(name: PackagePath) -> str: # Defer import of inspect for performance (python/cpython#118761) import inspect - return _topmost(name) or (inspect.getmodulename(name) or str(name)) + return _topmost(name) or inspect.getmodulename(name) or str(name) def _top_level_inferred(dist): diff --git a/pipenv/vendor/importlib_metadata/_adapters.py b/pipenv/vendor/importlib_metadata/_adapters.py index 6223263ed5..3b516a2d06 100644 --- a/pipenv/vendor/importlib_metadata/_adapters.py +++ b/pipenv/vendor/importlib_metadata/_adapters.py @@ -1,6 +1,6 @@ +import email.message import re import textwrap -import email.message from ._text import FoldedCase diff --git a/pipenv/vendor/importlib_metadata/_compat.py b/pipenv/vendor/importlib_metadata/_compat.py index df312b1cbb..01356d69b9 100644 --- a/pipenv/vendor/importlib_metadata/_compat.py +++ b/pipenv/vendor/importlib_metadata/_compat.py @@ -1,6 +1,5 @@ -import sys import platform - +import sys __all__ = ['install', 'NullFinder'] diff --git a/pipenv/vendor/importlib_metadata/_functools.py b/pipenv/vendor/importlib_metadata/_functools.py index 71f66bd03c..5dda6a2199 100644 --- a/pipenv/vendor/importlib_metadata/_functools.py +++ b/pipenv/vendor/importlib_metadata/_functools.py @@ -1,5 +1,5 @@ -import types import functools +import types # from jaraco.functools 3.3 diff --git a/pipenv/vendor/importlib_metadata/_meta.py b/pipenv/vendor/importlib_metadata/_meta.py index 1927d0f624..0942bbd963 100644 --- a/pipenv/vendor/importlib_metadata/_meta.py +++ b/pipenv/vendor/importlib_metadata/_meta.py @@ -1,9 +1,17 @@ from __future__ import annotations import os -from typing import Protocol -from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload - +from typing import ( + Any, + Dict, + Iterator, + List, + Optional, + Protocol, + TypeVar, + Union, + overload, +) _T = TypeVar("_T") diff --git a/pipenv/vendor/packaging/__init__.py b/pipenv/vendor/packaging/__init__.py index e7c0aa12ca..9ba41d8357 100644 --- a/pipenv/vendor/packaging/__init__.py +++ b/pipenv/vendor/packaging/__init__.py @@ -6,7 +6,7 @@ __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "24.0" +__version__ = "24.1" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" diff --git a/pipenv/vendor/packaging/_elffile.py 
b/pipenv/vendor/packaging/_elffile.py index 6fb19b30bb..f7a02180bf 100644 --- a/pipenv/vendor/packaging/_elffile.py +++ b/pipenv/vendor/packaging/_elffile.py @@ -8,10 +8,12 @@ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html """ +from __future__ import annotations + import enum import os import struct -from typing import IO, Optional, Tuple +from typing import IO class ELFInvalid(ValueError): @@ -87,11 +89,11 @@ def __init__(self, f: IO[bytes]) -> None: except struct.error as e: raise ELFInvalid("unable to parse machine and section information") from e - def _read(self, fmt: str) -> Tuple[int, ...]: + def _read(self, fmt: str) -> tuple[int, ...]: return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) @property - def interpreter(self) -> Optional[str]: + def interpreter(self) -> str | None: """ The path recorded in the ``PT_INTERP`` section header. """ diff --git a/pipenv/vendor/packaging/_manylinux.py b/pipenv/vendor/packaging/_manylinux.py index ad62505f3f..08f651fbd8 100644 --- a/pipenv/vendor/packaging/_manylinux.py +++ b/pipenv/vendor/packaging/_manylinux.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import collections import contextlib import functools @@ -5,7 +7,7 @@ import re import sys import warnings -from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple +from typing import Generator, Iterator, NamedTuple, Sequence from ._elffile import EIClass, EIData, ELFFile, EMachine @@ -17,7 +19,7 @@ # `os.PathLike` not a generic type until Python 3.9, so sticking with `str` # as the type for `path` until then. @contextlib.contextmanager -def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: +def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: try: with open(path, "rb") as f: yield ELFFile(f) @@ -72,7 +74,7 @@ def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: # For now, guess what the highest minor version might be, assume it will # be 50 for testing. Once this actually happens, update the dictionary # with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) +_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) class _GLibCVersion(NamedTuple): @@ -80,7 +82,7 @@ class _GLibCVersion(NamedTuple): minor: int -def _glibc_version_string_confstr() -> Optional[str]: +def _glibc_version_string_confstr() -> str | None: """ Primary implementation of glibc_version_string using os.confstr. """ @@ -90,7 +92,7 @@ def _glibc_version_string_confstr() -> Optional[str]: # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 try: # Should be a string like "glibc 2.17". - version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") + version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") assert version_string is not None _, version = version_string.rsplit() except (AssertionError, AttributeError, OSError, ValueError): @@ -99,7 +101,7 @@ def _glibc_version_string_confstr() -> Optional[str]: return version -def _glibc_version_string_ctypes() -> Optional[str]: +def _glibc_version_string_ctypes() -> str | None: """ Fallback implementation of glibc_version_string using ctypes. 
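# The packaging modules above swap typing.Optional/Tuple/Dict for PEP 604
# and PEP 585 syntax; `from __future__ import annotations` keeps those
# annotations as unevaluated strings, so they still parse on Python 3.8.
# Minimal sketch with a hypothetical helper:
from __future__ import annotations


def parse_glibc(raw: str | None) -> tuple[int, int] | None:
    # The new-style annotations above are never evaluated at runtime under
    # the future import, so older interpreters accept them.
    if raw is None:
        return None
    major, _, minor = raw.partition(".")
    return int(major), int(minor or 0)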
""" @@ -143,12 +145,12 @@ def _glibc_version_string_ctypes() -> Optional[str]: return version_str -def _glibc_version_string() -> Optional[str]: +def _glibc_version_string() -> str | None: """Returns glibc version string, or None if not using glibc.""" return _glibc_version_string_confstr() or _glibc_version_string_ctypes() -def _parse_glibc_version(version_str: str) -> Tuple[int, int]: +def _parse_glibc_version(version_str: str) -> tuple[int, int]: """Parse glibc version. We use a regexp instead of str.split because we want to discard any @@ -167,8 +169,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]: return int(m.group("major")), int(m.group("minor")) -@functools.lru_cache() -def _get_glibc_version() -> Tuple[int, int]: +@functools.lru_cache +def _get_glibc_version() -> tuple[int, int]: version_str = _glibc_version_string() if version_str is None: return (-1, -1) diff --git a/pipenv/vendor/packaging/_musllinux.py b/pipenv/vendor/packaging/_musllinux.py index 86419df9d7..d2bf30b563 100644 --- a/pipenv/vendor/packaging/_musllinux.py +++ b/pipenv/vendor/packaging/_musllinux.py @@ -4,11 +4,13 @@ linked against musl, and what musl version is used. """ +from __future__ import annotations + import functools import re import subprocess import sys -from typing import Iterator, NamedTuple, Optional, Sequence +from typing import Iterator, NamedTuple, Sequence from ._elffile import ELFFile @@ -18,7 +20,7 @@ class _MuslVersion(NamedTuple): minor: int -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: +def _parse_musl_version(output: str) -> _MuslVersion | None: lines = [n for n in (n.strip() for n in output.splitlines()) if n] if len(lines) < 2 or lines[0][:4] != "musl": return None @@ -28,8 +30,8 @@ def _parse_musl_version(output: str) -> Optional[_MuslVersion]: return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: +@functools.lru_cache +def _get_musl_version(executable: str) -> _MuslVersion | None: """Detect currently-running musl runtime version. This is done by checking the specified executable's dynamic linking diff --git a/pipenv/vendor/packaging/_parser.py b/pipenv/vendor/packaging/_parser.py index 684df75457..c1238c06ea 100644 --- a/pipenv/vendor/packaging/_parser.py +++ b/pipenv/vendor/packaging/_parser.py @@ -1,11 +1,13 @@ """Handwritten parser of dependency specifiers. -The docstring for each __parse_* function contains ENBF-inspired grammar representing +The docstring for each __parse_* function contains EBNF-inspired grammar representing the implementation. 
""" +from __future__ import annotations + import ast -from typing import Any, List, NamedTuple, Optional, Tuple, Union +from typing import NamedTuple, Sequence, Tuple, Union from ._tokenizer import DEFAULT_RULES, Tokenizer @@ -41,20 +43,16 @@ def serialize(self) -> str: MarkerVar = Union[Variable, Value] MarkerItem = Tuple[MarkerVar, Op, MarkerVar] -# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] -# MarkerList = List[Union["MarkerList", MarkerAtom, str]] -# mypy does not support recursive type definition -# https://github.com/python/mypy/issues/731 -MarkerAtom = Any -MarkerList = List[Any] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]] class ParsedRequirement(NamedTuple): name: str url: str - extras: List[str] + extras: list[str] specifier: str - marker: Optional[MarkerList] + marker: MarkerList | None # -------------------------------------------------------------------------------------- @@ -87,7 +85,7 @@ def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: def _parse_requirement_details( tokenizer: Tokenizer, -) -> Tuple[str, str, Optional[MarkerList]]: +) -> tuple[str, str, MarkerList | None]: """ requirement_details = AT URL (WS requirement_marker?)? | specifier WS? (requirement_marker)? @@ -156,7 +154,7 @@ def _parse_requirement_marker( return marker -def _parse_extras(tokenizer: Tokenizer) -> List[str]: +def _parse_extras(tokenizer: Tokenizer) -> list[str]: """ extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? """ @@ -175,11 +173,11 @@ def _parse_extras(tokenizer: Tokenizer) -> List[str]: return extras -def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: """ extras_list = identifier (wsp* ',' wsp* identifier)* """ - extras: List[str] = [] + extras: list[str] = [] if not tokenizer.check("IDENTIFIER"): return extras diff --git a/pipenv/vendor/packaging/_tokenizer.py b/pipenv/vendor/packaging/_tokenizer.py index dd0d648d49..89d041605c 100644 --- a/pipenv/vendor/packaging/_tokenizer.py +++ b/pipenv/vendor/packaging/_tokenizer.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import contextlib import re from dataclasses import dataclass -from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union +from typing import Iterator, NoReturn from .specifiers import Specifier @@ -21,7 +23,7 @@ def __init__( message: str, *, source: str, - span: Tuple[int, int], + span: tuple[int, int], ) -> None: self.span = span self.message = message @@ -34,7 +36,7 @@ def __str__(self) -> str: return "\n ".join([self.message, self.source, marker]) -DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { +DEFAULT_RULES: dict[str, str | re.Pattern[str]] = { "LEFT_PARENTHESIS": r"\(", "RIGHT_PARENTHESIS": r"\)", "LEFT_BRACKET": r"\[", @@ -96,13 +98,13 @@ def __init__( self, source: str, *, - rules: "Dict[str, Union[str, re.Pattern[str]]]", + rules: dict[str, str | re.Pattern[str]], ) -> None: self.source = source - self.rules: Dict[str, re.Pattern[str]] = { + self.rules: dict[str, re.Pattern[str]] = { name: re.compile(pattern) for name, pattern in rules.items() } - self.next_token: Optional[Token] = None + self.next_token: Token | None = None self.position = 0 def consume(self, name: str) -> None: @@ -154,8 +156,8 @@ def raise_syntax_error( self, message: str, *, - span_start: Optional[int] = None, - span_end: Optional[int] = None, + span_start: int | None = None, + span_end: int | None = None, ) -> NoReturn: """Raise 
ParserSyntaxError at the given position.""" span = ( diff --git a/pipenv/vendor/packaging/markers.py b/pipenv/vendor/packaging/markers.py index 8b98fca723..7ac7bb69a5 100644 --- a/pipenv/vendor/packaging/markers.py +++ b/pipenv/vendor/packaging/markers.py @@ -2,20 +2,16 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +from __future__ import annotations + import operator import os import platform import sys -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -from ._parser import ( - MarkerAtom, - MarkerList, - Op, - Value, - Variable, - parse_marker as _parse_marker, -) +from typing import Any, Callable, TypedDict, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker from ._tokenizer import ParserSyntaxError from .specifiers import InvalidSpecifier, Specifier from .utils import canonicalize_name @@ -50,6 +46,78 @@ class UndefinedEnvironmentName(ValueError): """ +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. + """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. + """ + + platform_version: str + """ + The system's release version, e.g. ``'#3 on degas'``. + + An empty string if the value cannot be determined. + """ + + python_full_version: str + """ + The Python version as string ``'major.minor.patchlevel'``. + + Note that unlike the Python :py:data:`sys.version`, this value will always include + the patchlevel (it defaults to 0). + """ + + platform_python_implementation: str + """ + A string identifying the Python implementation, e.g. ``'CPython'``. + """ + + python_version: str + """The Python version as string ``'major.minor'``.""" + + sys_platform: str + """ + This string contains a platform identifier that can be used to append + platform-specific components to :py:data:`sys.path`, for instance. + + For Unix systems, except on Linux and AIX, this is the lowercased OS name as + returned by ``uname -s`` with the first part of the version as returned by + ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python + was built. + """ + + def _normalize_extra_values(results: Any) -> Any: """ Normalize extra values. 
@@ -67,9 +135,8 @@ def _normalize_extra_values(results: Any) -> Any: def _format_marker( - marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True + marker: list[str] | MarkerAtom | str, first: bool | None = True ) -> str: - assert isinstance(marker, (list, tuple, str)) # Sometimes we have a structure like [[...]] which is a single item list @@ -95,7 +162,7 @@ def _format_marker( return marker -_operators: Dict[str, Operator] = { +_operators: dict[str, Operator] = { "in": lambda lhs, rhs: lhs in rhs, "not in": lambda lhs, rhs: lhs not in rhs, "<": operator.lt, @@ -115,14 +182,14 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool: else: return spec.contains(lhs, prereleases=True) - oper: Optional[Operator] = _operators.get(op.serialize()) + oper: Operator | None = _operators.get(op.serialize()) if oper is None: raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") return oper(lhs, rhs) -def _normalize(*values: str, key: str) -> Tuple[str, ...]: +def _normalize(*values: str, key: str) -> tuple[str, ...]: # PEP 685 – Comparison of extra names for optional distribution dependencies # https://peps.python.org/pep-0685/ # > When comparing extra names, tools MUST normalize the names being @@ -134,8 +201,8 @@ def _normalize(*values: str, key: str) -> Tuple[str, ...]: return values -def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] +def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool: + groups: list[list[bool]] = [[]] for marker in markers: assert isinstance(marker, (list, tuple, str)) @@ -164,7 +231,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: return any(all(item) for item in groups) -def format_full_version(info: "sys._version_info") -> str: +def format_full_version(info: sys._version_info) -> str: version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": @@ -172,7 +239,7 @@ def format_full_version(info: "sys._version_info") -> str: return version -def default_environment() -> Dict[str, str]: +def default_environment() -> Environment: iver = format_full_version(sys.implementation.version) implementation_name = sys.implementation.name return { @@ -231,7 +298,7 @@ def __eq__(self, other: Any) -> bool: return str(self) == str(other) - def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: + def evaluate(self, environment: dict[str, str] | None = None) -> bool: """Evaluate a marker. Return the boolean from evaluating the given marker against the @@ -240,8 +307,14 @@ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: The environment is determined from the current Python process. """ - current_environment = default_environment() + current_environment = cast("dict[str, str]", default_environment()) current_environment["extra"] = "" + # Work around platform.python_version() returning something that is not PEP 440 + # compliant for non-tagged Python builds. We preserve default_environment()'s + # behavior of returning platform.python_version() verbatim, and leave it to the + # caller to provide a syntactically valid version if they want to override it. + if current_environment["python_full_version"].endswith("+"): + current_environment["python_full_version"] += "local" if environment is not None: current_environment.update(environment) # The API used to allow setting extra to None. 
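# Sketch of why Marker.evaluate() above pads a python_full_version value
# that ends with "+": a bare trailing "+" (seen on some non-tagged CPython
# builds) is not a valid PEP 440 version, while "+local" is.
from pipenv.vendor.packaging.version import InvalidVersion, Version

try:
    Version("3.13.0+")  # empty local segment -> InvalidVersion
except InvalidVersion:
    pass

print(Version("3.13.0+local"))  # parses fine: 3.13.0+local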
We need to handle this diff --git a/pipenv/vendor/packaging/metadata.py b/pipenv/vendor/packaging/metadata.py index 73421e5d84..eb8dc844d2 100644 --- a/pipenv/vendor/packaging/metadata.py +++ b/pipenv/vendor/packaging/metadata.py @@ -1,50 +1,31 @@ +from __future__ import annotations + import email.feedparser import email.header import email.message import email.parser import email.policy -import sys import typing from typing import ( Any, Callable, - Dict, Generic, - List, - Optional, - Tuple, - Type, - Union, + Literal, + TypedDict, cast, ) -from . import requirements, specifiers, utils, version as version_module +from . import requirements, specifiers, utils +from . import version as version_module T = typing.TypeVar("T") -if sys.version_info[:2] >= (3, 8): # pragma: no cover - from typing import Literal, TypedDict -else: # pragma: no cover - if typing.TYPE_CHECKING: - from pipenv.patched.pip._vendor.typing_extensions import Literal, TypedDict - else: - try: - from pipenv.patched.pip._vendor.typing_extensions import Literal, TypedDict - except ImportError: - - class Literal: - def __init_subclass__(*_args, **_kwargs): - pass - - class TypedDict: - def __init_subclass__(*_args, **_kwargs): - pass try: ExceptionGroup except NameError: # pragma: no cover - class ExceptionGroup(Exception): # noqa: N818 + class ExceptionGroup(Exception): """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. If :external:exc:`ExceptionGroup` is already defined by Python itself, @@ -52,9 +33,9 @@ class ExceptionGroup(Exception): # noqa: N818 """ message: str - exceptions: List[Exception] + exceptions: list[Exception] - def __init__(self, message: str, exceptions: List[Exception]) -> None: + def __init__(self, message: str, exceptions: list[Exception]) -> None: self.message = message self.exceptions = exceptions @@ -100,32 +81,32 @@ class RawMetadata(TypedDict, total=False): metadata_version: str name: str version: str - platforms: List[str] + platforms: list[str] summary: str description: str - keywords: List[str] + keywords: list[str] home_page: str author: str author_email: str license: str # Metadata 1.1 - PEP 314 - supported_platforms: List[str] + supported_platforms: list[str] download_url: str - classifiers: List[str] - requires: List[str] - provides: List[str] - obsoletes: List[str] + classifiers: list[str] + requires: list[str] + provides: list[str] + obsoletes: list[str] # Metadata 1.2 - PEP 345 maintainer: str maintainer_email: str - requires_dist: List[str] - provides_dist: List[str] - obsoletes_dist: List[str] + requires_dist: list[str] + provides_dist: list[str] + obsoletes_dist: list[str] requires_python: str - requires_external: List[str] - project_urls: Dict[str, str] + requires_external: list[str] + project_urls: dict[str, str] # Metadata 2.0 # PEP 426 attempted to completely revamp the metadata format @@ -138,10 +119,10 @@ class RawMetadata(TypedDict, total=False): # Metadata 2.1 - PEP 566 description_content_type: str - provides_extra: List[str] + provides_extra: list[str] # Metadata 2.2 - PEP 643 - dynamic: List[str] + dynamic: list[str] # Metadata 2.3 - PEP 685 # No new fields were added in PEP 685, just some edge case were @@ -185,12 +166,12 @@ class RawMetadata(TypedDict, total=False): } -def _parse_keywords(data: str) -> List[str]: +def _parse_keywords(data: str) -> list[str]: """Split a string of comma-separate keyboards into a list of keywords.""" return [k.strip() for k in data.split(",")] -def _parse_project_urls(data: List[str]) -> Dict[str, str]: +def 
_parse_project_urls(data: list[str]) -> dict[str, str]: """Parse a list of label/URL string pairings separated by a comma.""" urls = {} for pair in data: @@ -230,7 +211,7 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]: return urls -def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: +def _get_payload(msg: email.message.Message, source: bytes | str) -> str: """Get the body of the message.""" # If our source is a str, then our caller has managed encodings for us, # and we don't need to deal with it. @@ -292,7 +273,7 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: _RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} -def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). This function returns a two-item tuple of dicts. The first dict is of @@ -308,8 +289,8 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st included in this dict. """ - raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} - unparsed: Dict[str, List[str]] = {} + raw: dict[str, str | list[str] | dict[str, str]] = {} + unparsed: dict[str, list[str]] = {} if isinstance(data, str): parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) @@ -357,7 +338,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st # The Header object stores it's data as chunks, and each chunk # can be independently encoded, so we'll need to check each # of them. - chunks: List[Tuple[bytes, Optional[str]]] = [] + chunks: list[tuple[bytes, str | None]] = [] for bin, encoding in email.header.decode_header(h): try: bin.decode("utf8", "strict") @@ -499,11 +480,11 @@ def __init__( ) -> None: self.added = added - def __set_name__(self, _owner: "Metadata", name: str) -> None: + def __set_name__(self, _owner: Metadata, name: str) -> None: self.name = name self.raw_name = _RAW_TO_EMAIL_MAPPING[name] - def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: + def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T: # With Python 3.8, the caching can be replaced with functools.cached_property(). # No need to check the cache as attribute lookup will resolve into the # instance's __dict__ before __get__ is called. 
@@ -531,7 +512,7 @@ def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: return cast(T, value) def _invalid_metadata( - self, msg: str, cause: Optional[Exception] = None + self, msg: str, cause: Exception | None = None ) -> InvalidMetadata: exc = InvalidMetadata( self.raw_name, msg.format_map({"field": repr(self.raw_name)}) @@ -606,7 +587,7 @@ def _process_description_content_type(self, value: str) -> str: ) return value - def _process_dynamic(self, value: List[str]) -> List[str]: + def _process_dynamic(self, value: list[str]) -> list[str]: for dynamic_field in map(str.lower, value): if dynamic_field in {"name", "version", "metadata-version"}: raise self._invalid_metadata( @@ -618,8 +599,8 @@ def _process_dynamic(self, value: List[str]) -> List[str]: def _process_provides_extra( self, - value: List[str], - ) -> List[utils.NormalizedName]: + value: list[str], + ) -> list[utils.NormalizedName]: normalized_names = [] try: for name in value: @@ -641,8 +622,8 @@ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: def _process_requires_dist( self, - value: List[str], - ) -> List[requirements.Requirement]: + value: list[str], + ) -> list[requirements.Requirement]: reqs = [] try: for req in value: @@ -665,7 +646,7 @@ class Metadata: _raw: RawMetadata @classmethod - def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: """Create an instance from :class:`RawMetadata`. If *validate* is true, all metadata will be validated. All exceptions @@ -675,7 +656,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": ins._raw = data.copy() # Mutations occur due to caching enriched values. if validate: - exceptions: List[Exception] = [] + exceptions: list[Exception] = [] try: metadata_version = ins.metadata_version metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) @@ -722,9 +703,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": return ins @classmethod - def from_email( - cls, data: Union[bytes, str], *, validate: bool = True - ) -> "Metadata": + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: """Parse metadata from email headers. If *validate* is true, the metadata will be validated. 
All exceptions @@ -760,66 +739,66 @@ def from_email( *validate* parameter)""" version: _Validator[version_module.Version] = _Validator() """:external:ref:`core-metadata-version` (required)""" - dynamic: _Validator[Optional[List[str]]] = _Validator( + dynamic: _Validator[list[str] | None] = _Validator( added="2.2", ) """:external:ref:`core-metadata-dynamic` (validated against core metadata field names and lowercased)""" - platforms: _Validator[Optional[List[str]]] = _Validator() + platforms: _Validator[list[str] | None] = _Validator() """:external:ref:`core-metadata-platform`""" - supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1") + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") """:external:ref:`core-metadata-supported-platform`""" - summary: _Validator[Optional[str]] = _Validator() + summary: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" - description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body """:external:ref:`core-metadata-description`""" - description_content_type: _Validator[Optional[str]] = _Validator(added="2.1") + description_content_type: _Validator[str | None] = _Validator(added="2.1") """:external:ref:`core-metadata-description-content-type` (validated)""" - keywords: _Validator[Optional[List[str]]] = _Validator() + keywords: _Validator[list[str] | None] = _Validator() """:external:ref:`core-metadata-keywords`""" - home_page: _Validator[Optional[str]] = _Validator() + home_page: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-home-page`""" - download_url: _Validator[Optional[str]] = _Validator(added="1.1") + download_url: _Validator[str | None] = _Validator(added="1.1") """:external:ref:`core-metadata-download-url`""" - author: _Validator[Optional[str]] = _Validator() + author: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-author`""" - author_email: _Validator[Optional[str]] = _Validator() + author_email: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-author-email`""" - maintainer: _Validator[Optional[str]] = _Validator(added="1.2") + maintainer: _Validator[str | None] = _Validator(added="1.2") """:external:ref:`core-metadata-maintainer`""" - maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2") + maintainer_email: _Validator[str | None] = _Validator(added="1.2") """:external:ref:`core-metadata-maintainer-email`""" - license: _Validator[Optional[str]] = _Validator() + license: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-license`""" - classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") """:external:ref:`core-metadata-classifier`""" - requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( added="1.2" ) """:external:ref:`core-metadata-requires-dist`""" - requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( added="1.2" ) """:external:ref:`core-metadata-requires-python`""" # Because `Requires-External` allows for non-PEP 440 version specifiers, we # don't do any processing on the values. 
- requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") + requires_external: _Validator[list[str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-requires-external`""" - project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-project-url`""" # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation # regardless of metadata version. - provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( + provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( added="2.1", ) """:external:ref:`core-metadata-provides-extra`""" - provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-provides-dist`""" - obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-obsoletes-dist`""" - requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") + requires: _Validator[list[str] | None] = _Validator(added="1.1") """``Requires`` (deprecated)""" - provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") + provides: _Validator[list[str] | None] = _Validator(added="1.1") """``Provides`` (deprecated)""" - obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") """``Obsoletes`` (deprecated)""" diff --git a/pipenv/vendor/packaging/requirements.py b/pipenv/vendor/packaging/requirements.py index bdc43a7e98..4e068c9567 100644 --- a/pipenv/vendor/packaging/requirements.py +++ b/pipenv/vendor/packaging/requirements.py @@ -1,8 +1,9 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
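# Quick sketch of the packaging.metadata API whose annotations are
# modernised above: parse core-metadata headers and read typed fields back.
from pipenv.vendor.packaging.metadata import Metadata

raw_headers = (
    "Metadata-Version: 2.1\n"
    "Name: example-dist\n"
    "Version: 1.0\n"
    "Requires-Python: >=3.8\n"
)
meta = Metadata.from_email(raw_headers, validate=True)
print(meta.name, meta.version, meta.requires_python)  # example-dist 1.0 >=3.8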
+from __future__ import annotations -from typing import Any, Iterator, Optional, Set +from typing import Any, Iterator from ._parser import parse_requirement as _parse_requirement from ._tokenizer import ParserSyntaxError @@ -37,10 +38,10 @@ def __init__(self, requirement_string: str) -> None: raise InvalidRequirement(str(e)) from e self.name: str = parsed.name - self.url: Optional[str] = parsed.url or None - self.extras: Set[str] = set(parsed.extras or []) + self.url: str | None = parsed.url or None + self.extras: set[str] = set(parsed.extras or []) self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) - self.marker: Optional[Marker] = None + self.marker: Marker | None = None if parsed.marker is not None: self.marker = Marker.__new__(Marker) self.marker._markers = _normalize_extra_values(parsed.marker) diff --git a/pipenv/vendor/packaging/specifiers.py b/pipenv/vendor/packaging/specifiers.py index b6c9ebbc56..2d1ae65a8a 100644 --- a/pipenv/vendor/packaging/specifiers.py +++ b/pipenv/vendor/packaging/specifiers.py @@ -8,10 +8,12 @@ from pipenv.vendor.packaging.version import Version """ +from __future__ import annotations + import abc import itertools import re -from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union +from typing import Callable, Iterable, Iterator, TypeVar, Union from .utils import canonicalize_version from .version import Version @@ -64,7 +66,7 @@ def __eq__(self, other: object) -> bool: @property @abc.abstractmethod - def prereleases(self) -> Optional[bool]: + def prereleases(self) -> bool | None: """Whether or not pre-releases as a whole are allowed. This can be set to either ``True`` or ``False`` to explicitly enable or disable @@ -79,14 +81,14 @@ def prereleases(self, value: bool) -> None: """ @abc.abstractmethod - def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: + def contains(self, item: str, prereleases: bool | None = None) -> bool: """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None ) -> Iterator[UnparsedVersionVar]: """ Takes an iterable of items and filters them so that only items which @@ -217,7 +219,7 @@ class Specifier(BaseSpecifier): "===": "arbitrary", } - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: """Initialize a Specifier instance. :param spec: @@ -234,7 +236,7 @@ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: if not match: raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - self._spec: Tuple[str, str] = ( + self._spec: tuple[str, str] = ( match.group("operator").strip(), match.group("version").strip(), ) @@ -318,7 +320,7 @@ def __str__(self) -> str: return "{}{}".format(*self._spec) @property - def _canonical_spec(self) -> Tuple[str, str]: + def _canonical_spec(self) -> tuple[str, str]: canonical_version = canonicalize_version( self._spec[1], strip_trailing_zero=(self._spec[0] != "~="), @@ -364,7 +366,6 @@ def _get_operator(self, op: str) -> CallableOperator: return operator_callable def _compare_compatible(self, prospective: Version, spec: str) -> bool: - # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. 
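# Sketch of the Requirement fields retyped above (url, extras, specifier,
# marker), using the vendored packaging API.
from pipenv.vendor.packaging.requirements import Requirement

req = Requirement('requests[security]>=2.31; python_version >= "3.8"')
print(req.name)                 # requests
print(sorted(req.extras))       # ['security']
print(str(req.specifier))       # >=2.31
print(req.marker is not None)   # True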
This allows us to # implement this in terms of the other specifiers instead of @@ -385,7 +386,6 @@ def _compare_compatible(self, prospective: Version, spec: str) -> bool: ) def _compare_equal(self, prospective: Version, spec: str) -> bool: - # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. @@ -429,21 +429,18 @@ def _compare_not_equal(self, prospective: Version, spec: str) -> bool: return not self._compare_equal(prospective, spec) def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: - # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) <= Version(spec) def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: - # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) >= Version(spec) def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: - # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec_str) @@ -468,7 +465,6 @@ def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: return True def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: - # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec_str) @@ -501,7 +497,7 @@ def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: return str(prospective).lower() == str(spec).lower() - def __contains__(self, item: Union[str, Version]) -> bool: + def __contains__(self, item: str | Version) -> bool: """Return whether or not the item is contained in this specifier. :param item: The item to check for. @@ -522,9 +518,7 @@ def __contains__(self, item: Union[str, Version]) -> bool: """ return self.contains(item) - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: + def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: """Return whether or not the item is contained in this specifier. :param item: @@ -569,7 +563,7 @@ def contains( return operator_callable(normalized_item, self.version) def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None ) -> Iterator[UnparsedVersionVar]: """Filter items in the given iterable, that match the specifier. @@ -633,7 +627,7 @@ def filter( _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") -def _version_split(version: str) -> List[str]: +def _version_split(version: str) -> list[str]: """Split version into components. The split components are intended for version comparison. The logic does @@ -641,7 +635,7 @@ def _version_split(version: str) -> List[str]: components back with :func:`_version_join` may not produce the original version string. 
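# The _compare_compatible comment above notes that "~=2.2" behaves like
# ">=2.2,==2.*"; a minimal check of that equivalence via the public API:
from pipenv.vendor.packaging.specifiers import Specifier, SpecifierSet

compatible = Specifier("~=2.2")
equivalent = SpecifierSet(">=2.2,==2.*")
for candidate in ("2.2", "2.5", "3.0"):
    assert compatible.contains(candidate) == equivalent.contains(candidate)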
""" - result: List[str] = [] + result: list[str] = [] epoch, _, rest = version.rpartition("!") result.append(epoch or "0") @@ -655,7 +649,7 @@ def _version_split(version: str) -> List[str]: return result -def _version_join(components: List[str]) -> str: +def _version_join(components: list[str]) -> str: """Join split version components into a version string. This function assumes the input came from :func:`_version_split`, where the @@ -672,7 +666,7 @@ def _is_not_suffix(segment: str) -> bool: ) -def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: +def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: left_split, right_split = [], [] # Get the release segment of our versions @@ -700,9 +694,7 @@ class SpecifierSet(BaseSpecifier): specifiers (``>=3.0,!=3.1``), or no specifier at all. """ - def __init__( - self, specifiers: str = "", prereleases: Optional[bool] = None - ) -> None: + def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None: """Initialize a SpecifierSet instance. :param specifiers: @@ -730,7 +722,7 @@ def __init__( self._prereleases = prereleases @property - def prereleases(self) -> Optional[bool]: + def prereleases(self) -> bool | None: # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: @@ -787,7 +779,7 @@ def __str__(self) -> str: def __hash__(self) -> int: return hash(self._specs) - def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": + def __and__(self, other: SpecifierSet | str) -> SpecifierSet: """Return a SpecifierSet which is a combination of the two sets. :param other: The other object to combine with. @@ -883,8 +875,8 @@ def __contains__(self, item: UnparsedVersion) -> bool: def contains( self, item: UnparsedVersion, - prereleases: Optional[bool] = None, - installed: Optional[bool] = None, + prereleases: bool | None = None, + installed: bool | None = None, ) -> bool: """Return whether or not the item is contained in this SpecifierSet. @@ -938,7 +930,7 @@ def contains( return all(s.contains(item, prereleases=prereleases) for s in self._specs) def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None ) -> Iterator[UnparsedVersionVar]: """Filter items in the given iterable, that match the specifiers in this set. @@ -995,8 +987,8 @@ def filter( # which will filter out any pre-releases, unless there are no final # releases. else: - filtered: List[UnparsedVersionVar] = [] - found_prereleases: List[UnparsedVersionVar] = [] + filtered: list[UnparsedVersionVar] = [] + found_prereleases: list[UnparsedVersionVar] = [] for item in iterable: parsed_version = _coerce_version(item) diff --git a/pipenv/vendor/packaging/tags.py b/pipenv/vendor/packaging/tags.py index 89f1926137..6667d29908 100644 --- a/pipenv/vendor/packaging/tags.py +++ b/pipenv/vendor/packaging/tags.py @@ -2,6 +2,8 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
+from __future__ import annotations + import logging import platform import re @@ -11,15 +13,10 @@ import sysconfig from importlib.machinery import EXTENSION_SUFFIXES from typing import ( - Dict, - FrozenSet, Iterable, Iterator, - List, - Optional, Sequence, Tuple, - Union, cast, ) @@ -30,7 +27,7 @@ PythonVersion = Sequence[int] MacVersion = Tuple[int, int] -INTERPRETER_SHORT_NAMES: Dict[str, str] = { +INTERPRETER_SHORT_NAMES: dict[str, str] = { "python": "py", # Generic. "cpython": "cp", "pypy": "pp", @@ -96,7 +93,7 @@ def __repr__(self) -> str: return f"<{self} @ {id(self)}>" -def parse_tag(tag: str) -> FrozenSet[Tag]: +def parse_tag(tag: str) -> frozenset[Tag]: """ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. @@ -112,8 +109,8 @@ def parse_tag(tag: str) -> FrozenSet[Tag]: return frozenset(tags) -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value: Union[int, str, None] = sysconfig.get_config_var(name) +def _get_config_var(name: str, warn: bool = False) -> int | str | None: + value: int | str | None = sysconfig.get_config_var(name) if value is None and warn: logger.debug( "Config variable '%s' is unset, Python ABI tag may be incorrect", name @@ -125,7 +122,7 @@ def _normalize_string(string: str) -> str: return string.replace(".", "_").replace("-", "_").replace(" ", "_") -def _is_threaded_cpython(abis: List[str]) -> bool: +def _is_threaded_cpython(abis: list[str]) -> bool: """ Determine if the ABI corresponds to a threaded (`--disable-gil`) build. @@ -151,7 +148,7 @@ def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: py_version = tuple(py_version) # To allow for version comparison. abis = [] version = _version_nodot(py_version[:2]) @@ -185,9 +182,9 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: def cpython_tags( - python_version: Optional[PythonVersion] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, + python_version: PythonVersion | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, *, warn: bool = False, ) -> Iterator[Tag]: @@ -244,7 +241,7 @@ def cpython_tags( yield Tag(interpreter, "abi3", platform_) -def _generic_abi() -> List[str]: +def _generic_abi() -> list[str]: """ Return the ABI tag based on EXT_SUFFIX. """ @@ -286,9 +283,9 @@ def _generic_abi() -> List[str]: def generic_tags( - interpreter: Optional[str] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, + interpreter: str | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, *, warn: bool = False, ) -> Iterator[Tag]: @@ -332,9 +329,9 @@ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: def compatible_tags( - python_version: Optional[PythonVersion] = None, - interpreter: Optional[str] = None, - platforms: Optional[Iterable[str]] = None, + python_version: PythonVersion | None = None, + interpreter: str | None = None, + platforms: Iterable[str] | None = None, ) -> Iterator[Tag]: """ Yields the sequence of tags that are compatible with a specific version of Python. 
@@ -366,7 +363,7 @@ def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: return "i386" -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: +def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]: formats = [cpu_arch] if cpu_arch == "x86_64": if version < (10, 4): @@ -399,7 +396,7 @@ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None + version: MacVersion | None = None, arch: str | None = None ) -> Iterator[str]: """ Yields the platform tags for a macOS system. diff --git a/pipenv/vendor/packaging/utils.py b/pipenv/vendor/packaging/utils.py index c2c2f75aa8..d33da5bb8b 100644 --- a/pipenv/vendor/packaging/utils.py +++ b/pipenv/vendor/packaging/utils.py @@ -2,8 +2,10 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +from __future__ import annotations + import re -from typing import FrozenSet, NewType, Tuple, Union, cast +from typing import NewType, Tuple, Union, cast from .tags import Tag, parse_tag from .version import InvalidVersion, Version @@ -53,7 +55,7 @@ def is_normalized_name(name: str) -> bool: def canonicalize_version( - version: Union[Version, str], *, strip_trailing_zero: bool = True + version: Version | str, *, strip_trailing_zero: bool = True ) -> str: """ This is very similar to Version.__str__, but has one subtle difference @@ -102,7 +104,7 @@ def canonicalize_version( def parse_wheel_filename( filename: str, -) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: +) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]: if not filename.endswith(".whl"): raise InvalidWheelFilename( f"Invalid wheel filename (extension must be '.whl'): {filename}" @@ -143,7 +145,7 @@ def parse_wheel_filename( return (name, version, build, tags) -def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: +def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]: if filename.endswith(".tar.gz"): file_stem = filename[: -len(".tar.gz")] elif filename.endswith(".zip"): diff --git a/pipenv/vendor/packaging/version.py b/pipenv/vendor/packaging/version.py index b54f903932..ae4a4068f5 100644 --- a/pipenv/vendor/packaging/version.py +++ b/pipenv/vendor/packaging/version.py @@ -7,9 +7,11 @@ from pipenv.vendor.packaging.version import parse, Version """ +from __future__ import annotations + import itertools import re -from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union +from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType @@ -35,14 +37,14 @@ class _Version(NamedTuple): epoch: int - release: Tuple[int, ...] - dev: Optional[Tuple[str, int]] - pre: Optional[Tuple[str, int]] - post: Optional[Tuple[str, int]] - local: Optional[LocalType] + release: tuple[int, ...] + dev: tuple[str, int] | None + pre: tuple[str, int] | None + post: tuple[str, int] | None + local: LocalType | None -def parse(version: str) -> "Version": +def parse(version: str) -> Version: """Parse the given version string. >>> parse('1.0.dev1') @@ -65,7 +67,7 @@ class InvalidVersion(ValueError): class _BaseVersion: - _key: Tuple[Any, ...] + _key: tuple[Any, ...] 
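# Sketch of parse_wheel_filename, whose signature above now uses builtin
# tuple/frozenset generics; the wheel name is an arbitrary example.
from pipenv.vendor.packaging.utils import parse_wheel_filename

name, version, build, tags = parse_wheel_filename("pip-24.0-py3-none-any.whl")
print(name, version, build)          # pip 24.0 ()
print(sorted(str(t) for t in tags))  # ['py3-none-any']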
def __hash__(self) -> int: return hash(self._key) @@ -73,13 +75,13 @@ def __hash__(self) -> int: # Please keep the duplicated `isinstance` check # in the six comparisons hereunder # unless you find a way to avoid adding overhead function calls. - def __lt__(self, other: "_BaseVersion") -> bool: + def __lt__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented return self._key < other._key - def __le__(self, other: "_BaseVersion") -> bool: + def __le__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented @@ -91,13 +93,13 @@ def __eq__(self, other: object) -> bool: return self._key == other._key - def __ge__(self, other: "_BaseVersion") -> bool: + def __ge__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented return self._key >= other._key - def __gt__(self, other: "_BaseVersion") -> bool: + def __gt__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented @@ -274,7 +276,7 @@ def epoch(self) -> int: return self._version.epoch @property - def release(self) -> Tuple[int, ...]: + def release(self) -> tuple[int, ...]: """The components of the "release" segment of the version. >>> Version("1.2.3").release @@ -290,7 +292,7 @@ def release(self) -> Tuple[int, ...]: return self._version.release @property - def pre(self) -> Optional[Tuple[str, int]]: + def pre(self) -> tuple[str, int] | None: """The pre-release segment of the version. >>> print(Version("1.2.3").pre) @@ -305,7 +307,7 @@ def pre(self) -> Optional[Tuple[str, int]]: return self._version.pre @property - def post(self) -> Optional[int]: + def post(self) -> int | None: """The post-release number of the version. >>> print(Version("1.2.3").post) @@ -316,7 +318,7 @@ def post(self) -> Optional[int]: return self._version.post[1] if self._version.post else None @property - def dev(self) -> Optional[int]: + def dev(self) -> int | None: """The development number of the version. >>> print(Version("1.2.3").dev) @@ -327,7 +329,7 @@ def dev(self) -> Optional[int]: return self._version.dev[1] if self._version.dev else None @property - def local(self) -> Optional[str]: + def local(self) -> str | None: """The local version segment of the version. >>> print(Version("1.2.3").local) @@ -450,9 +452,8 @@ def micro(self) -> int: def _parse_letter_version( - letter: Optional[str], number: Union[str, bytes, SupportsInt, None] -) -> Optional[Tuple[str, int]]: - + letter: str | None, number: str | bytes | SupportsInt | None +) -> tuple[str, int] | None: if letter: # We consider there to be an implicit 0 in a pre-release if there is # not a numeral associated with it. @@ -488,7 +489,7 @@ def _parse_letter_version( _local_version_separators = re.compile(r"[\._-]") -def _parse_local_version(local: Optional[str]) -> Optional[LocalType]: +def _parse_local_version(local: str | None) -> LocalType | None: """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). 
""" @@ -502,13 +503,12 @@ def _parse_local_version(local: Optional[str]) -> Optional[LocalType]: def _cmpkey( epoch: int, - release: Tuple[int, ...], - pre: Optional[Tuple[str, int]], - post: Optional[Tuple[str, int]], - dev: Optional[Tuple[str, int]], - local: Optional[LocalType], + release: tuple[int, ...], + pre: tuple[str, int] | None, + post: tuple[str, int] | None, + dev: tuple[str, int] | None, + local: LocalType | None, ) -> CmpKey: - # When we compare a release version, we want to compare it with all of the # trailing zeros removed. So we'll use a reverse the list, drop all the now # leading zeros until we come to something non zero, then take the rest diff --git a/pipenv/vendor/tomli/__init__.py b/pipenv/vendor/tomli/__init__.py index 4c6ec97ec6..bf2b131c07 100644 --- a/pipenv/vendor/tomli/__init__.py +++ b/pipenv/vendor/tomli/__init__.py @@ -3,7 +3,7 @@ # Licensed to PSF under a Contributor Agreement. __all__ = ("loads", "load", "TOMLDecodeError") -__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT +__version__ = "2.0.2" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT from ._parser import TOMLDecodeError, load, loads diff --git a/pipenv/vendor/tomli/_parser.py b/pipenv/vendor/tomli/_parser.py index f1bb0aa19a..660c88c01c 100644 --- a/pipenv/vendor/tomli/_parser.py +++ b/pipenv/vendor/tomli/_parser.py @@ -7,7 +7,7 @@ from collections.abc import Iterable import string from types import MappingProxyType -from typing import Any, BinaryIO, NamedTuple +from typing import IO, Any, NamedTuple from ._re import ( RE_DATETIME, @@ -54,7 +54,7 @@ class TOMLDecodeError(ValueError): """An error raised if a document is not valid TOML.""" -def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]: +def load(__fp: IO[bytes], *, parse_float: ParseFloat = float) -> dict[str, Any]: """Parse TOML from a binary file object.""" b = __fp.read() try: @@ -71,7 +71,12 @@ def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # no # The spec allows converting "\r\n" to "\n", even in string # literals. Let's do so to simplify parsing. - src = __s.replace("\r\n", "\n") + try: + src = __s.replace("\r\n", "\n") + except (AttributeError, TypeError): + raise TypeError( + f"Expected str object, not '{type(__s).__qualname__}'" + ) from None pos = 0 out = Output(NestedDict(), Flags()) header: Key = () @@ -679,7 +684,7 @@ def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat: instead of returning illegal types. """ # The default `float` callable never returns illegal types. Optimize it. 
- if parse_float is float: # type: ignore[comparison-overlap] + if parse_float is float: return float def safe_parse_float(float_str: str) -> Any: diff --git a/pipenv/vendor/tomlkit/__init__.py b/pipenv/vendor/tomlkit/__init__.py index 1379fbcfc3..6bd1f212f7 100644 --- a/pipenv/vendor/tomlkit/__init__.py +++ b/pipenv/vendor/tomlkit/__init__.py @@ -27,7 +27,7 @@ from pipenv.vendor.tomlkit.api import ws -__version__ = "0.12.4" +__version__ = "0.13.2" __all__ = [ "aot", "array", diff --git a/pipenv/vendor/tomlkit/_types.py b/pipenv/vendor/tomlkit/_types.py index 8eeb75ea9d..501bf4dcaa 100644 --- a/pipenv/vendor/tomlkit/_types.py +++ b/pipenv/vendor/tomlkit/_types.py @@ -19,10 +19,10 @@ # Importing from builtins is preferred over simple assignment, see issues: # https://github.com/python/mypy/issues/8715 # https://github.com/python/mypy/issues/10068 - from builtins import dict as _CustomDict # noqa: N812 - from builtins import float as _CustomFloat # noqa: N812 - from builtins import int as _CustomInt # noqa: N812 - from builtins import list as _CustomList # noqa: N812 + from builtins import dict as _CustomDict + from builtins import float as _CustomFloat + from builtins import int as _CustomInt + from builtins import list as _CustomList from typing import Callable from typing import Concatenate from typing import ParamSpec @@ -31,8 +31,7 @@ P = ParamSpec("P") class WrapperType(Protocol): - def _new(self: WT, value: Any) -> WT: - ... + def _new(self: WT, value: Any) -> WT: ... else: from collections.abc import MutableMapping diff --git a/pipenv/vendor/tomlkit/api.py b/pipenv/vendor/tomlkit/api.py index ead8818656..03fd09c8b5 100644 --- a/pipenv/vendor/tomlkit/api.py +++ b/pipenv/vendor/tomlkit/api.py @@ -75,6 +75,11 @@ def dump(data: Mapping, fp: IO[str], *, sort_keys: bool = False) -> None: :param data: a dict-like object to dump :param sort_keys: if true, sort the keys in alphabetic order + + :Example: + + >>> with open("output.toml", "w") as fp: + ... tomlkit.dump(data, fp) """ fp.write(dumps(data, sort_keys=sort_keys)) @@ -160,7 +165,7 @@ def datetime(raw: str) -> DateTime: return item(value) -def array(raw: str = None) -> Array: +def array(raw: str = "[]") -> Array: """Create an array item for its string representation. 
:Example: @@ -172,9 +177,6 @@ def array(raw: str = None) -> Array: >>> a [1, 2, 3] """ - if raw is None: - raw = "[]" - return value(raw) diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index 8b52dfad6f..588cda2c8d 100644 --- a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -155,7 +155,7 @@ def _validate_out_of_order_table(self, key: SingleKey | None = None) -> None: return if key not in self._map or not isinstance(self._map[key], tuple): return - OutOfOrderTableProxy(self, self._map[key]) + OutOfOrderTableProxy.validate(self, self._map[key]) def append( self, key: Key | str | None, item: Item, validate: bool = True @@ -251,9 +251,11 @@ def append( for k, v in item.value.body: current.append(k, v) self._body[ - current_idx[-1] - if isinstance(current_idx, tuple) - else current_idx + ( + current_idx[-1] + if isinstance(current_idx, tuple) + else current_idx + ) ] = (current_body_element[0], current) return self @@ -310,7 +312,7 @@ def _raw_append(self, key: Key | None, item: Item) -> None: if key is not None and not isinstance(current, Table): raise KeyAlreadyPresent(key) - self._map[key] = current_idx + (len(self._body),) + self._map[key] = (*current_idx, len(self._body)) elif key is not None: self._map[key] = len(self._body) @@ -321,8 +323,6 @@ def _raw_append(self, key: Key | None, item: Item) -> None: if key is not None: dict.__setitem__(self, key.key, item.value) - return self - def _remove_at(self, idx: int) -> None: key = self._body[idx][0] index = self._map.get(key) @@ -445,7 +445,7 @@ def _insert_at(self, idx: int, key: Key | str, item: Any) -> Container: current_idx = self._map[key] if not isinstance(current_idx, tuple): current_idx = (current_idx,) - self._map[key] = current_idx + (idx,) + self._map[key] = (*current_idx, idx) else: self._map[key] = idx self._body.insert(idx, (key, item)) @@ -786,7 +786,21 @@ def _previous_item(self, idx: int | None = None, ignore=(Null,)) -> Item | None: class OutOfOrderTableProxy(_CustomDict): - def __init__(self, container: Container, indices: tuple[int]) -> None: + @staticmethod + def validate(container: Container, indices: tuple[int, ...]) -> None: + """Validate out of order tables in the given container""" + # Append all items to a temp container to see if there is any error + temp_container = Container(True) + for i in indices: + _, item = container._body[i] + + if isinstance(item, Table): + for k, v in item.value.body: + temp_container.append(k, v, validate=False) + + temp_container._validate_out_of_order_table() + + def __init__(self, container: Container, indices: tuple[int, ...]) -> None: self._container = container self._internal_container = Container(True) self._tables = [] @@ -799,8 +813,8 @@ def __init__(self, container: Container, indices: tuple[int]) -> None: self._tables.append(item) table_idx = len(self._tables) - 1 for k, v in item.value.body: - self._internal_container.append(k, v, validate=False) - self._tables_map[k] = table_idx + self._internal_container._raw_append(k, v) + self._tables_map.setdefault(k, []).append(table_idx) if k is not None: dict.__setitem__(self, k.key, v) @@ -821,8 +835,12 @@ def __getitem__(self, key: Key | str) -> Any: def __setitem__(self, key: Key | str, item: Any) -> None: if key in self._tables_map: - table = self._tables[self._tables_map[key]] - table[key] = item + # Overwrite the first table and remove others + indices = self._tables_map[key] + while len(indices) > 1: + table = self._tables[indices.pop()] + 
self._remove_table(table) + self._tables[indices[0]][key] = item elif self._tables: table = self._tables[0] table[key] = item @@ -842,15 +860,16 @@ def _remove_table(self, table: Table) -> None: break def __delitem__(self, key: Key | str) -> None: - if key in self._tables_map: - table = self._tables[self._tables_map[key]] + if key not in self._tables_map: + raise NonExistentKey(key) + + for i in reversed(self._tables_map[key]): + table = self._tables[i] del table[key] if not table and len(self._tables) > 1: self._remove_table(table) - del self._tables_map[key] - else: - raise NonExistentKey(key) + del self._tables_map[key] del self._internal_container[key] if key is not None: dict.__delitem__(self, key) diff --git a/pipenv/vendor/tomlkit/exceptions.py b/pipenv/vendor/tomlkit/exceptions.py index 30d0d85cee..8c7e6e7490 100644 --- a/pipenv/vendor/tomlkit/exceptions.py +++ b/pipenv/vendor/tomlkit/exceptions.py @@ -115,7 +115,7 @@ class UnexpectedCharError(ParseError): """ def __init__(self, line: int, col: int, char: str) -> None: - message = f"Unexpected character: {repr(char)}" + message = f"Unexpected character: {char!r}" super().__init__(line, col, message=message) @@ -148,7 +148,7 @@ class InvalidCharInStringError(ParseError): """ def __init__(self, line: int, col: int, char: str) -> None: - message = f"Invalid character {repr(char)} in string" + message = f"Invalid character {char!r} in string" super().__init__(line, col, message=message) @@ -225,3 +225,10 @@ def __init__(self, value: str, invalid_sequences: Collection[str], delimiter: st f"Invalid string: {delimiter}{repr_}{delimiter}. " f"The character sequences {invalid_sequences} are invalid." ) + + +class ConvertError(TypeError, ValueError, TOMLKitError): + """Raised when item() fails to convert a value. + It should be a TypeError, but due to historical reasons + it needs to subclass ValueError as well. + """ diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index f71f2c2a4e..10e9c3c180 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -33,6 +33,7 @@ from pipenv.vendor.tomlkit._types import wrap_method from pipenv.vendor.tomlkit._utils import CONTROL_CHARS from pipenv.vendor.tomlkit._utils import escape_string +from pipenv.vendor.tomlkit.exceptions import ConvertError from pipenv.vendor.tomlkit.exceptions import InvalidStringError @@ -46,75 +47,58 @@ AT = TypeVar("AT", bound="AbstractTable") -class _ConvertError(TypeError, ValueError): - """An internal error raised when item() fails to convert a value. - It should be a TypeError, but due to historical reasons - it needs to subclass ValueError as well. - """ - - @overload -def item(value: bool, _parent: Item | None = ..., _sort_keys: bool = ...) -> Bool: - ... +def item(value: bool, _parent: Item | None = ..., _sort_keys: bool = ...) -> Bool: ... @overload -def item(value: int, _parent: Item | None = ..., _sort_keys: bool = ...) -> Integer: - ... +def item(value: int, _parent: Item | None = ..., _sort_keys: bool = ...) -> Integer: ... @overload -def item(value: float, _parent: Item | None = ..., _sort_keys: bool = ...) -> Float: - ... +def item(value: float, _parent: Item | None = ..., _sort_keys: bool = ...) -> Float: ... @overload -def item(value: str, _parent: Item | None = ..., _sort_keys: bool = ...) -> String: - ... +def item(value: str, _parent: Item | None = ..., _sort_keys: bool = ...) -> String: ... @overload def item( value: datetime, _parent: Item | None = ..., _sort_keys: bool = ... -) -> DateTime: - ... 
+) -> DateTime: ... @overload -def item(value: date, _parent: Item | None = ..., _sort_keys: bool = ...) -> Date: - ... +def item(value: date, _parent: Item | None = ..., _sort_keys: bool = ...) -> Date: ... @overload -def item(value: time, _parent: Item | None = ..., _sort_keys: bool = ...) -> Time: - ... +def item(value: time, _parent: Item | None = ..., _sort_keys: bool = ...) -> Time: ... @overload def item( value: Sequence[dict], _parent: Item | None = ..., _sort_keys: bool = ... -) -> AoT: - ... +) -> AoT: ... @overload -def item(value: Sequence, _parent: Item | None = ..., _sort_keys: bool = ...) -> Array: - ... +def item( + value: Sequence, _parent: Item | None = ..., _sort_keys: bool = ... +) -> Array: ... @overload -def item(value: dict, _parent: Array = ..., _sort_keys: bool = ...) -> InlineTable: - ... +def item(value: dict, _parent: Array = ..., _sort_keys: bool = ...) -> InlineTable: ... @overload -def item(value: dict, _parent: Item | None = ..., _sort_keys: bool = ...) -> Table: - ... +def item(value: dict, _parent: Item | None = ..., _sort_keys: bool = ...) -> Table: ... @overload -def item(value: ItemT, _parent: Item | None = ..., _sort_keys: bool = ...) -> ItemT: - ... +def item(value: ItemT, _parent: Item | None = ..., _sort_keys: bool = ...) -> ItemT: ... def item(value: Any, _parent: Item | None = None, _sort_keys: bool = False) -> Item: @@ -216,16 +200,16 @@ def item(value: Any, _parent: Item | None = None, _sort_keys: bool = False) -> I for encoder in CUSTOM_ENCODERS: try: rv = encoder(value) - except TypeError: + except ConvertError: pass else: if not isinstance(rv, Item): - raise _ConvertError( - f"Custom encoder returned {type(rv)}, not a subclass of Item" + raise ConvertError( + f"Custom encoder is expected to return an instance of Item, got {type(rv)}" ) return rv - raise _ConvertError(f"Invalid type {type(value)}") + raise ConvertError(f"Unable to convert an object of {type(value)} to a TOML item") class StringType(Enum): @@ -566,7 +550,7 @@ def as_string(self) -> str: return self._s def __repr__(self) -> str: - return f"<{self.__class__.__name__} {repr(self._s)}>" + return f"<{self.__class__.__name__} {self._s!r}>" def _getstate(self, protocol=3): return self._s, self._fixed @@ -629,9 +613,8 @@ def as_string(self) -> str: def _new(self, result): raw = str(result) - if self._sign: - sign = "+" if result >= 0 else "-" - raw = sign + raw + if self._sign and result >= 0: + raw = f"+{raw}" return Integer(result, self._trivia, raw) @@ -724,9 +707,8 @@ def as_string(self) -> str: def _new(self, result): raw = str(result) - if self._sign: - sign = "+" if result >= 0 else "-" - raw = sign + raw + if self._sign and result >= 0: + raw = f"+{raw}" return Float(result, self._trivia, raw) @@ -970,9 +952,14 @@ def __new__(cls, year: int, month: int, day: int, *_: Any) -> date: return date.__new__(cls, year, month, day) def __init__( - self, year: int, month: int, day: int, trivia: Trivia, raw: str + self, + year: int, + month: int, + day: int, + trivia: Trivia | None = None, + raw: str = "", ) -> None: - super().__init__(trivia) + super().__init__(trivia or Trivia()) self._raw = raw @@ -1045,10 +1032,10 @@ def __init__( second: int, microsecond: int, tzinfo: tzinfo | None, - trivia: Trivia, - raw: str, + trivia: Trivia | None = None, + raw: str = "", ) -> None: - super().__init__(trivia) + super().__init__(trivia or Trivia()) self._raw = raw @@ -1457,23 +1444,19 @@ def value(self) -> container.Container: return self._value @overload - def append(self: AT, key: None, value: 
Comment | Whitespace) -> AT: - ... + def append(self: AT, key: None, value: Comment | Whitespace) -> AT: ... @overload - def append(self: AT, key: Key | str, value: Any) -> AT: - ... + def append(self: AT, key: Key | str, value: Any) -> AT: ... def append(self, key, value): raise NotImplementedError @overload - def add(self: AT, key: Comment | Whitespace) -> AT: - ... + def add(self: AT, key: Comment | Whitespace) -> AT: ... @overload - def add(self: AT, key: Key | str, value: Any = ...) -> AT: - ... + def add(self: AT, key: Key | str, value: Any = ...) -> AT: ... def add(self, key, value=None): if value is None: @@ -1638,11 +1621,23 @@ def is_super_table(self) -> bool: If true, it won't appear in the TOML representation.""" if self._is_super_table is not None: return self._is_super_table - # If the table has only one child and that child is a table, then it is a super table. - if len(self) != 1: + if not self: return False - only_child = next(iter(self.values())) - return isinstance(only_child, (Table, AoT)) + # If the table has children and all children are tables, then it is a super table. + for k, child in self.items(): + if not isinstance(k, Key): + k = SingleKey(k) + index = self.value._map[k] + if isinstance(index, tuple): + return False + real_key = self.value.body[index][0] + if ( + not isinstance(child, (Table, AoT)) + or real_key is None + or real_key.is_dotted() + ): + return False + return True def as_string(self) -> str: return self._value.as_string() @@ -1874,12 +1869,10 @@ def __len__(self) -> int: return len(self._body) @overload - def __getitem__(self, key: slice) -> list[Table]: - ... + def __getitem__(self, key: slice) -> list[Table]: ... @overload - def __getitem__(self, key: int) -> Table: - ... + def __getitem__(self, key: int) -> Table: ... 
def __getitem__(self, key): return self._body[key] diff --git a/pipenv/vendor/tomlkit/parser.py b/pipenv/vendor/tomlkit/parser.py index 3a559d920c..190d482a51 100644 --- a/pipenv/vendor/tomlkit/parser.py +++ b/pipenv/vendor/tomlkit/parser.py @@ -481,7 +481,7 @@ def _parse_value(self) -> Item: raw, ) except ValueError: - raise self.parse_error(InvalidDateTimeError) + raise self.parse_error(InvalidDateTimeError) from None if m.group(1): try: @@ -513,7 +513,7 @@ def _parse_value(self) -> Item: raw + time_part, ) except ValueError: - raise self.parse_error(InvalidDateError) + raise self.parse_error(InvalidDateError) from None if m.group(5): try: @@ -529,7 +529,7 @@ def _parse_value(self) -> Item: raw, ) except ValueError: - raise self.parse_error(InvalidTimeError) + raise self.parse_error(InvalidTimeError) from None item = self._parse_number(raw, trivia) if item is not None: @@ -981,9 +981,9 @@ def _parse_table( is_aot and i == len(name_parts) - 2, is_super_table=i < len(name_parts) - 2, name=_name.key, - display_name=full_key.as_string() - if i == len(name_parts) - 2 - else None, + display_name=( + full_key.as_string() if i == len(name_parts) - 2 else None + ), ), ) diff --git a/pipenv/vendor/tomlkit/source.py b/pipenv/vendor/tomlkit/source.py index 4e4fc24360..3577facd93 100644 --- a/pipenv/vendor/tomlkit/source.py +++ b/pipenv/vendor/tomlkit/source.py @@ -119,7 +119,7 @@ def inc(self, exception: type[ParseError] | None = None) -> bool: self._idx = len(self) self._current = self.EOF if exception: - raise self.parse_error(exception) + raise self.parse_error(exception) from None return False diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index a4fddac865..d6f573a9de 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -2,9 +2,9 @@ click-didyoumean==0.3.1 click==8.1.7 colorama==0.4.6 dparse==0.6.3 -importlib-metadata==8.4.0 - zipp==3.18.1 -packaging==24.0 +importlib-metadata==8.5.0 + zipp==3.20.2 +packaging==24.1 pexpect==4.9.0 pipdeptree==2.23.4 plette==2.1.0 @@ -13,5 +13,5 @@ python-dotenv==1.0.1 pythonfinder==2.1.0 ruamel.yaml==0.18.6 shellingham==1.5.4 -tomli==2.0.1 -tomlkit==0.12.4 +tomli==2.0.2 +tomlkit==0.13.2 diff --git a/pipenv/vendor/zipp/__init__.py b/pipenv/vendor/zipp/__init__.py index 6d05d9a078..161a4fb1ce 100644 --- a/pipenv/vendor/zipp/__init__.py +++ b/pipenv/vendor/zipp/__init__.py @@ -1,3 +1,12 @@ +""" +A Path-like interface for zipfiles. + +This codebase is shared between zipfile.Path in the stdlib +and zipp in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" + import io import posixpath import zipfile @@ -5,6 +14,7 @@ import contextlib import pathlib import re +import stat import sys from .compat.py310 import text_encoding @@ -36,7 +46,7 @@ def _parents(path): def _ancestry(path): """ Given a path with elements separated by - posixpath.sep, generate all elements of that path + posixpath.sep, generate all elements of that path. >>> list(_ancestry('b/d')) ['b/d', 'b'] @@ -48,9 +58,14 @@ def _ancestry(path): ['b'] >>> list(_ancestry('')) [] + + Multiple separators are treated like a single. + + >>> list(_ancestry('//b//d///f//')) + ['//b//d///f', '//b//d', '//b'] """ path = path.rstrip(posixpath.sep) - while path and path != posixpath.sep: + while path.rstrip(posixpath.sep): yield path path, tail = posixpath.split(path) @@ -188,7 +203,10 @@ def _extract_text_encoding(encoding=None, *args, **kwargs): class Path: """ - A pathlib-compatible interface for zip files. 
+ A :class:`importlib.resources.abc.Traversable` interface for zip files. + + Implements many of the features users enjoy from + :class:`pathlib.Path`. Consider a zip file with this structure:: @@ -263,7 +281,7 @@ class Path: >>> str(path.parent) 'mem' - If the zipfile has no filename, such attribtues are not + If the zipfile has no filename, such attributes are not valid and accessing them will raise an Exception. >>> zf.filename = None @@ -391,9 +409,11 @@ def match(self, path_pattern): def is_symlink(self): """ - Return whether this path is a symlink. Always false (python/cpython#82102). + Return whether this path is a symlink. """ - return False + info = self.root.getinfo(self.at) + mode = info.external_attr >> 16 + return stat.S_ISLNK(mode) def glob(self, pattern): if not pattern: @@ -402,8 +422,7 @@ def glob(self, pattern): prefix = re.escape(self.at) tr = Translator(seps='/') matches = re.compile(prefix + tr.translate(pattern)).fullmatch - names = (data.filename for data in self.root.filelist) - return map(self._next, filter(matches, names)) + return map(self._next, filter(matches, self.root.namelist())) def rglob(self, pattern): return self.glob(f'**/{pattern}') diff --git a/pipenv/vendor/zipp/compat/overlay.py b/pipenv/vendor/zipp/compat/overlay.py new file mode 100644 index 0000000000..59ee00cfac --- /dev/null +++ b/pipenv/vendor/zipp/compat/overlay.py @@ -0,0 +1,37 @@ +""" +Expose zipp.Path as .zipfile.Path. + +Includes everything else in ``zipfile`` to match future usage. Just +use: + +>>> from zipp.compat.overlay import zipfile + +in place of ``import zipfile``. + +Relative imports are supported too. + +>>> from zipp.compat.overlay.zipfile import ZipInfo + +The ``zipfile`` object added to ``sys.modules`` needs to be +hashable (#126). + +>>> _ = hash(sys.modules['zipp.compat.overlay.zipfile']) +""" + +import importlib +import sys +import types + +import pipenv.vendor.zipp as zipp + + +class HashableNamespace(types.SimpleNamespace): + def __hash__(self): + return hash(tuple(vars(self))) + + +zipfile = HashableNamespace(**vars(importlib.import_module('zipfile'))) +zipfile.Path = zipp.Path +zipfile._path = zipp + +sys.modules[__name__ + '.zipfile'] = zipfile # type: ignore[assignment] diff --git a/pipenv/vendor/zipp/compat/py310.py b/pipenv/vendor/zipp/compat/py310.py index d5ca53e037..8264a482f2 100644 --- a/pipenv/vendor/zipp/compat/py310.py +++ b/pipenv/vendor/zipp/compat/py310.py @@ -7,5 +7,7 @@ def _text_encoding(encoding, stacklevel=2, /): # pragma: no cover text_encoding = ( - io.text_encoding if sys.version_info > (3, 10) else _text_encoding # type: ignore + io.text_encoding # type: ignore[unused-ignore, attr-defined] + if sys.version_info > (3, 10) + else _text_encoding ) diff --git a/pipenv/vendor/zipp/glob.py b/pipenv/vendor/zipp/glob.py index 69c41d77c3..4320f1c0ba 100644 --- a/pipenv/vendor/zipp/glob.py +++ b/pipenv/vendor/zipp/glob.py @@ -28,7 +28,7 @@ def translate(self, pattern): """ Given a glob pattern, produce a regex that matches it. """ - return self.extend(self.translate_core(pattern)) + return self.extend(self.match_dirs(self.translate_core(pattern))) def extend(self, pattern): r""" @@ -41,6 +41,14 @@ def extend(self, pattern): """ return rf'(?s:{pattern})\Z' + def match_dirs(self, pattern): + """ + Ensure that zipfile.Path directory names are matched. + + zipfile.Path directory names always end in a slash. + """ + return rf'{pattern}[/]?' + def translate_core(self, pattern): r""" Given a glob pattern, produce a regex that matches it.
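
The hunks above are a verbatim record of the vendored updates. For reviewers, the sketch below is an illustrative (non-normative) smoke test of a few behavior changes they introduce: tomli 2.0.2's stricter ``loads()`` input check, tomlkit 0.13.2's public ``ConvertError``, and zipp 3.20.2's new ``zipp.compat.overlay`` module. Import paths assume pipenv's vendored namespace, matching the module paths used throughout this diff::

    # Illustrative only -- not part of the patch above.  Import paths assume
    # pipenv's vendored namespace, matching the module paths in this diff.
    import io

    from pipenv.vendor import tomli
    from pipenv.vendor.tomlkit.exceptions import ConvertError
    from pipenv.vendor.tomlkit.items import item
    from pipenv.vendor.zipp.compat.overlay import zipfile  # overlay module new in zipp 3.20.x

    # tomli 2.0.2: loads() now rejects non-str input with an explicit TypeError.
    try:
        tomli.loads(b"x = 1")  # type: ignore[arg-type]
    except TypeError as exc:
        print(f"tomli: {exc}")

    # tomlkit 0.13.2: values that cannot be converted raise the public
    # ConvertError (a TypeError and ValueError subclass) instead of the
    # former private _ConvertError.
    try:
        item(object())
    except ConvertError as exc:
        print(f"tomlkit: {exc}")

    # zipp 3.20.2: zipp.Path is exposed through the zipfile-compatible
    # overlay namespace; glob() matches entries from namelist().
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as zf:
        zf.writestr("pkg/data.toml", "x = 1\n")
    print([p.name for p in zipfile.Path(buf).glob("**/*.toml")])

The sketch only exercises APIs that appear verbatim in the hunks above; exact exception messages are not asserted, since they may change upstream.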