diff --git a/.github/workflows/testCode.yaml b/.github/workflows/testCode.yaml index 397b342..4a4459f 100644 --- a/.github/workflows/testCode.yaml +++ b/.github/workflows/testCode.yaml @@ -8,36 +8,38 @@ on: jobs: testCode: - runs-on: windows-latest strategy: matrix: python-version: [3.13] steps: + - name: Install the latest version of uv + uses: astral-sh/setup-uv@v6 - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - architecture: x86 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install tox - - name: Test with tox + architecture: x64 + - name: Run unit tests + shell: cmd # Run automated/unit tests - run: tox - - name: Lint with flake8 + run: .\rununittests.bat + - name: Lint + shell: cmd # Check code with the linter - run: .\runlint.ps1 + run: .\runlint.bat - name: Validate metadata + shell: cmd # E2E: test to check the script can be run, no need to actually test the file. # The internal checks are covered with unit tests. - run: .\runvalidate.ps1 --dry-run _test/testData/addons/fake/13.0.json _tests\testData\nvdaAPIVersions.json + run: .\runvalidate.bat --dry-run _test/testData/addons/fake/13.0.json tests\testData\nvdaAPIVersions.json - name: Get sha256 + shell: cmd # E2E: test to check the script can be run - run: .\runsha.ps1 _tests\testData\fake.nvda-addon + run: .\runsha.bat tests\testData\fake.nvda-addon - name: Generate json file + shell: cmd # E2E: test to check the script can be run - run: .\runcreatejson.ps1 -f _tests\testData\fake.nvda-addon --dir _tests\testOutput\test_runcreatejson --channel=stable --publisher=fakepublisher --sourceUrl=https://github.com/fake/ --url=https://github.com/fake.nvda-addon --licName="GPL v2" --licUrl="https://www.gnu.org/licenses/gpl-2.0.html" + run: .\runcreatejson.bat -f tests\testData\fake.nvda-addon --dir tests\testOutput\test_runcreatejson --channel=stable --publisher=fakepublisher --sourceUrl=https://github.com/fake/ --url=https://github.com/fake.nvda-addon --licName="GPL v2" --licUrl="https://www.gnu.org/licenses/gpl-2.0.html" diff --git a/.gitignore b/.gitignore index c7fce1c..523c646 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -.tox .venv __pycache__ -_tests/testOutput +testOutput +*.egg-info diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..e3ce905 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,105 @@ +# https://pre-commit.ci/ +# Configuration for Continuous Integration service +ci: + # Can't run Windows scons scripts on Linux. + # Pyright does not seem to work in pre-commit CI + skip: [unitTest, pyright] + autoupdate_schedule: monthly + autoupdate_commit_msg: "Pre-commit auto-update" + autofix_commit_msg: "Pre-commit auto-fix" + submodules: true + +default_language_version: + python: python3.13 + +repos: +- repo: https://github.com/pre-commit-ci/pre-commit-ci-config + rev: v1.6.1 + hooks: + - id: check-pre-commit-ci-config + +- repo: meta + hooks: + # ensures that exclude directives apply to any file in the repository. + - id: check-useless-excludes + # ensures that the configured hooks apply to at least one file in the repository. 
+ - id: check-hooks-apply + +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + # Prevents commits to certain branches + - id: no-commit-to-branch + args: ["--branch", "main"] + # Checks that large files have not been added. Default cut-off for "large" files is 500kb. + - id: check-added-large-files + # Checks python syntax + - id: check-ast + # Checks for filenames that will conflict on case insensitive filesystems (the majority of Windows filesystems, most of the time) + - id: check-case-conflict + # Checks for artifacts from resolving merge conflicts. + - id: check-merge-conflict + # Checks Python files for debug statements, such as python's breakpoint function, or those inserted by some IDEs. + - id: debug-statements + # Removes trailing whitespace. + - id: trailing-whitespace + types_or: [python, batch, markdown, toml, yaml, powershell] + # Ensures all files end in 1 (and only 1) newline. + - id: end-of-file-fixer + types_or: [python, batch, markdown, toml, yaml, powershell] + # Removes the UTF-8 BOM from files that have it. + # See https://github.com/nvaccess/nvda/blob/master/projectDocs/dev/codingStandards.md#encoding + - id: fix-byte-order-marker + types_or: [python, batch, markdown, toml, yaml, powershell] + # Validates TOML files. + - id: check-toml + # Validates YAML files. + - id: check-yaml + # Ensures that links to lines in files under version control point to a particular commit. + - id: check-vcs-permalinks + # Avoids using reserved Windows filenames. + - id: check-illegal-windows-names + # Checks that tests are named test_*.py. + - id: name-tests-test + args: ["--unittest"] + +- repo: https://github.com/asottile/add-trailing-comma + rev: v3.2.0 + hooks: + # Ruff preserves indent/new-line formatting of function arguments, list items, and similar iterables, + # if a trailing comma is added. + # This adds a trailing comma to args/iterable items in case it was missed. + - id: add-trailing-comma + +- repo: https://github.com/astral-sh/ruff-pre-commit + # Matches Ruff version in pyproject. + rev: v0.13.0 + hooks: + - id: ruff + name: lint with ruff + args: [ --fix ] + - id: ruff-format + name: format with ruff + +- repo: https://github.com/RobertCraigie/pyright-python + rev: v1.1.405 + hooks: + - id: pyright + name: Check types with pyright + +- repo: https://github.com/astral-sh/uv-pre-commit + rev: 0.8.17 + hooks: + - id: uv-lock + name: Verify uv lock file + # Override python interpreter from .python-versions as that is too strict for pre-commit.ci + args: ["-p3.13"] + +- repo: local + hooks: + - id: unitTest + name: unit tests + entry: ./rununittests.bat + language: script + pass_filenames: false + types_or: [python, batch] diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..ad929f8 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +cpython-3.13-windows-x86_64-none diff --git a/README.md b/README.md index 825d81e..26172d4 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ The Action aims to validate the metadata of add-ons submitted to * The `*.nvda-addon` file can be downloaded * The Sha256 of the downloaded `*.nvda-addon` file matches. * Check data matches the addon's manifest file. - * The manifest exists in the downloaded `*.nvda-addon` file and can be loaded by the `AddonManifest` class. + * The manifest exists in the downloaded `*.nvda-addon` file and can be loaded by the `AddonManifest` class. 
* The submission addonName matches the manifest summary field * The submission description matches the manifest description field * The homepage URL matches the manifest URL field @@ -40,8 +40,7 @@ From cmd.exe: To test the scripts used in this action, you can run the unit tests. -1. Install [tox](https://pypi.org/project/tox): `pip install tox` -1. `tox` +1. Install [uv](https://docs.astral.sh/uv/getting-started/installation/) ## Python linting diff --git a/_validate/addonManifest.py b/_validate/addonManifest.py index 9ae4d4d..7be77e3 100644 --- a/_validate/addonManifest.py +++ b/_validate/addonManifest.py @@ -1,32 +1,25 @@ -#!/usr/bin/env python - -# Copyright (C) 2022-2023 NV Access Limited +# Copyright (C) 2022-2025 NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. # For more details see: https://www.gnu.org/licenses/gpl-2.0.html -import os -import sys -from typing import ( - Optional, - TextIO, - Tuple, -) -from io import StringIO +from io import StringIO, TextIOBase +from typing import Any, cast from configobj import ConfigObj from configobj.validate import Validator, ValidateError -sys.path.append(os.path.dirname(__file__)) -# E402 module level import not at top of file -from majorMinorPatch import MajorMinorPatch # noqa:E402 -del sys.path[-1] +from .majorMinorPatch import MajorMinorPatch + +ApiVersionT = tuple[int, int, int] # major, minor, patch class AddonManifest(ConfigObj): """From the NVDA addonHandler module. Should be kept in sync. - Add-on manifest file. It contains metadata about an NVDA add-on package. """ - configspec = ConfigObj(StringIO( - """ + Add-on manifest file. It contains metadata about an NVDA add-on package.""" + + configspec = ConfigObj( + StringIO( + """ # NVDA Add-on Manifest configuration specification # Add-on unique name # Suggested convention is lowerCamelCase. @@ -66,56 +59,59 @@ class AddonManifest(ConfigObj): # "0.0.0" is also valid. # The final integer can be left out, and in that case will default to 0. E.g. 2019.1 + """, + ), + ) + + def __init__(self, input: str | TextIOBase, translatedInput: str | None = None): """ - )) + Constructs an :class:`AddonManifest` instance from manifest string data. - def __init__(self, input: TextIO, translatedInput: Optional[TextIO] = None): - """ Constructs an L{AddonManifest} instance from manifest string data - @param input: data to read the manifest information - @param translatedInput: translated manifest input + :param input: data to read the manifest information. Can be a filename or a file-like object. 
+ :param translatedInput: translated manifest input """ - super().__init__( + super().__init__( # type: ignore[reportUnknownMemberType] input, configspec=self.configspec, - encoding='utf-8', - default_encoding='utf-8', + encoding="utf-8", + default_encoding="utf-8", ) - self._errors: Optional[str] = None - val = Validator({"apiVersion": validate_apiVersionString}) - result = self.validate(val, copy=True, preserve_errors=True) + self._errors: str | None = None + validator = Validator({"apiVersion": validate_apiVersionString}) + result = self.validate(validator, copy=True, preserve_errors=True) # type: ignore[reportUnknownMemberType] if result is not True: self._errors = result elif self._validateApiVersionRange() is not True: self._errors = "Constraint not met: minimumNVDAVersion ({}) <= lastTestedNVDAVersion ({})".format( - self.get("minimumNVDAVersion"), - self.get("lastTestedNVDAVersion") + cast(ApiVersionT, self.get("minimumNVDAVersion")), # type: ignore[reportUnknownMemberType] + cast(ApiVersionT, self.get("lastTestedNVDAVersion")), # type: ignore[reportUnknownMemberType] ) self._translatedConfig = None if translatedInput is not None: - self._translatedConfig = ConfigObj(translatedInput, encoding='utf-8', default_encoding='utf-8') - for key in ('summary', 'description'): - val = self._translatedConfig.get(key) + self._translatedConfig = ConfigObj(translatedInput, encoding="utf-8", default_encoding="utf-8") + for key in ("summary", "description"): + val: str = self._translatedConfig.get(key) # type: ignore[reportUnknownMemberType] if val: self[key] = val @property - def errors(self) -> str: + def errors(self) -> str | None: return self._errors def _validateApiVersionRange(self) -> bool: - lastTested = self.get("lastTestedNVDAVersion") - minRequiredVersion = self.get("minimumNVDAVersion") + lastTested = cast(ApiVersionT, self.get("lastTestedNVDAVersion")) # type: ignore[reportUnknownMemberType] + minRequiredVersion = cast(ApiVersionT, self.get("minimumNVDAVersion")) # type: ignore[reportUnknownMemberType] return minRequiredVersion <= lastTested -def validate_apiVersionString(value: str) -> Tuple[int, int, int]: +def validate_apiVersionString(value: str | Any) -> ApiVersionT: """From the NVDA addonHandler module. Should be kept in sync.""" if not value or value == "None": return (0, 0, 0) if not isinstance(value, str): raise ValidateError( "Expected an apiVersion in the form of a string. " - f"e.g. '2019.1.0' instead of {value} (type {type(value)})" + f"e.g. '2019.1.0' instead of {value} (type {type(value)})", ) try: versionParsed = MajorMinorPatch.getFromStr(value) diff --git a/_validate/createJson.py b/_validate/createJson.py index febc407..f94f398 100644 --- a/_validate/createJson.py +++ b/_validate/createJson.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - # Copyright (C) 2022-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. 
# For more details see: https://www.gnu.org/licenses/gpl-2.0.html @@ -9,27 +7,39 @@ import json import argparse import os -import sys +from typing import cast import zipfile -from typing import ( - Dict, - Optional, - cast, -) - -sys.path.append(os.path.dirname(__file__)) # To allow this module to be run as a script by runcreatejson.bat -# E402 module level import not at top of file -from addonManifest import AddonManifest # noqa:E402 -from manifestLoader import getAddonManifest, getAddonManifestLocalizations # noqa:E402 -from majorMinorPatch import MajorMinorPatch # noqa:E402 -import sha256 # noqa:E402 -del sys.path[-1] +from .addonManifest import AddonManifest, ApiVersionT +from .manifestLoader import getAddonManifest, getAddonManifestLocalizations +from .majorMinorPatch import MajorMinorPatch +from .sha256 import sha256_checksum + + +@dataclasses.dataclass +class AddonData: + addonId: str + displayName: str + URL: str + description: str + sha256: str + addonVersionName: str + addonVersionNumber: dict[str, int] + minNVDAVersion: dict[str, int] + lastTestedVersion: dict[str, int] + channel: str + publisher: str + sourceURL: str + license: str + homepage: str | None + licenseURL: str | None + submissionTime: int + translations: list[dict[str, str]] def getSha256(addonPath: str) -> str: with open(addonPath, "rb") as f: - sha256Addon = sha256.sha256_checksum(f) + sha256Addon = sha256_checksum(f) return sha256Addon @@ -38,17 +48,17 @@ def getCurrentTime() -> int: def generateJsonFile( - manifest: AddonManifest, - addonPath: str, - parentDir: str, - channel: str, - publisher: str, - sourceUrl: str, - url: str, - licenseName: str, - licenseUrl: Optional[str], + manifest: AddonManifest, + addonPath: str, + parentDir: str, + channel: str, + publisher: str, + sourceUrl: str, + url: str, + licenseName: str, + licenseUrl: str | None, ) -> None: - data = _createDictMatchingJsonSchema( + data = _createDataclassMatchingJsonSchema( manifest=manifest, sha=getSha256(addonPath), channel=channel, @@ -62,84 +72,80 @@ def generateJsonFile( filePath = buildOutputFilePath(data, parentDir) with open(filePath, "wt", encoding="utf-8") as f: - json.dump(data, f, indent="\t", ensure_ascii=False) + json.dump(dataclasses.asdict(data), f, indent="\t", ensure_ascii=False) print(f"Wrote json file: {filePath}") -def buildOutputFilePath(data, parentDir) -> os.PathLike: - addonDir = os.path.join(parentDir, data["addonId"]) - versionNumber = MajorMinorPatch(**data["addonVersionNumber"]) - canonicalVersionString = ".".join( - (str(i) for i in dataclasses.astuple(versionNumber)) - ) +def buildOutputFilePath(data: AddonData, parentDir: str) -> os.PathLike[str]: + addonDir = os.path.join(parentDir, data.addonId) + versionNumber = MajorMinorPatch(**data.addonVersionNumber) + canonicalVersionString = ".".join((str(i) for i in dataclasses.astuple(versionNumber))) if not os.path.isdir(addonDir): os.makedirs(addonDir) - filePath = os.path.join(addonDir, f'{canonicalVersionString}.json') - return cast(os.PathLike, filePath) - - -def _createDictMatchingJsonSchema( - manifest: AddonManifest, - sha: str, - channel: str, - publisher: str, - sourceUrl: str, - url: str, - licenseName: str, - licenseUrl: Optional[str], -) -> Dict[str, str]: + filePath = os.path.join(addonDir, f"{canonicalVersionString}.json") + return cast(os.PathLike[str], filePath) + + +def _createDataclassMatchingJsonSchema( + manifest: AddonManifest, + sha: str, + channel: str, + publisher: str, + sourceUrl: str, + url: str, + licenseName: str, + licenseUrl: str | None, 
+) -> AddonData: """Refer to _validate/addonVersion_schema.json""" try: - addonVersionNumber = MajorMinorPatch.getFromStr(manifest["version"]) + addonVersionNumber = MajorMinorPatch.getFromStr(cast(str, manifest["version"])) except ValueError as e: - raise ValueError(f"Manifest version invalid {addonVersionNumber}") from e + raise ValueError(f"Manifest version invalid {manifest['version']}") from e - try: - addonData = { - "addonId": manifest["name"], - "displayName": manifest["summary"], - "URL": url, - "description": manifest["description"], - "sha256": sha, - "addonVersionName": manifest["version"], - "addonVersionNumber": dataclasses.asdict(addonVersionNumber), - "minNVDAVersion": dataclasses.asdict( - MajorMinorPatch(*manifest["minimumNVDAVersion"]) - ), - "lastTestedVersion": dataclasses.asdict( - MajorMinorPatch(*manifest["lastTestedNVDAVersion"]) - ), - "channel": channel, - "publisher": publisher, - "sourceURL": sourceUrl, - "license": licenseName, - } - except KeyError as e: - raise KeyError(f"Manifest missing required key '{e.args[0]}'.") from e + for key in ("name", "summary", "description", "minimumNVDAVersion", "lastTestedNVDAVersion", "version"): + if key not in manifest: + raise KeyError(f"Manifest missing required key '{key}'.") # Add optional fields - homepage = manifest.get("url") - if homepage and homepage != 'None': - # The config default is None - # which is parsed by configobj as a string not a NoneType - addonData["homepage"] = homepage - if licenseUrl: - addonData["licenseURL"] = licenseUrl - addonData["submissionTime"] = getCurrentTime() - - addonData["translations"] = [] + homepage: str | None = manifest.get("url") # type: ignore[reportUnknownMemberType] + if not homepage or homepage == "None": + homepage = None + + translations: list[dict[str, str]] = [] for langCode, manifest in getAddonManifestLocalizations(manifest): try: - addonData["translations"].append( + translations.append( { "language": langCode, - "displayName": manifest["summary"], - "description": manifest["description"], - } + "displayName": cast(str, manifest["summary"]), + "description": cast(str, manifest["description"]), + }, ) except KeyError as e: raise KeyError(f"Translation for {langCode} missing required key '{e.args[0]}'.") from e + addonData = AddonData( + addonId=cast(str, manifest["name"]), + displayName=cast(str, manifest["summary"]), + URL=url, + description=cast(str, manifest["description"]), + sha256=sha, + addonVersionName=cast(str, manifest["version"]), + addonVersionNumber=dataclasses.asdict(addonVersionNumber), + minNVDAVersion=dataclasses.asdict(MajorMinorPatch(*cast(tuple[int], manifest["minimumNVDAVersion"]))), + lastTestedVersion=dataclasses.asdict( + MajorMinorPatch(*cast(ApiVersionT, manifest["lastTestedNVDAVersion"])), + ), + channel=channel, + publisher=publisher, + sourceURL=sourceUrl, + license=licenseName, + homepage=homepage, + licenseURL=licenseUrl, + submissionTime=getCurrentTime(), + translations=translations, + ) + return addonData @@ -201,7 +207,7 @@ def main(): required=False, ) args = parser.parse_args() - errorFilePath: Optional[str] = args.errorOutputFile + errorFilePath: str | None = args.errorOutputFile try: manifest = getAddonManifest(args.file) @@ -243,5 +249,5 @@ def main(): raise -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/_validate/majorMinorPatch.py b/_validate/majorMinorPatch.py index 2737315..1239916 100644 --- a/_validate/majorMinorPatch.py +++ b/_validate/majorMinorPatch.py @@ -1,4 +1,4 @@ -# Copyright (C) 2023 
Noelia Ruiz Martínez, NV Access Limited +# Copyright (C) 2023-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. # For more details see: https://www.gnu.org/licenses/gpl-2.0.html @@ -20,7 +20,7 @@ def getFromStr(cls, version: str) -> "MajorMinorPatch": return cls( major=int(versionParts[0]), minor=int(versionParts[1]), - patch=0 if len(versionParts) == 2 else int(versionParts[2]) + patch=0 if len(versionParts) == 2 else int(versionParts[2]), ) def __str__(self) -> str: diff --git a/_validate/manifestLoader.py b/_validate/manifestLoader.py index f63433c..8e40e2f 100644 --- a/_validate/manifestLoader.py +++ b/_validate/manifestLoader.py @@ -1,20 +1,22 @@ -# Copyright (C) 2022 Noelia Ruiz Martínez, NV Access Limited +# Copyright (C) 2022-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. # For more details see: https://www.gnu.org/licenses/gpl-2.0.html +from collections.abc import Generator from glob import glob import os import pathlib import shutil -from typing import Generator, Tuple -import zipfile -from addonManifest import AddonManifest import tempfile +import zipfile + +from .addonManifest import AddonManifest + TEMP_DIR = tempfile.gettempdir() def getAddonManifest(addonPath: str) -> AddonManifest: - """ Extract manifest.ini from *.nvda-addon and parse. + """Extract manifest.ini from *.nvda-addon and parse. Raise on error. """ extractDir = os.path.join(TEMP_DIR, "tempAddon") @@ -33,9 +35,9 @@ def getAddonManifest(addonPath: str) -> AddonManifest: def getAddonManifestLocalizations( - manifest: AddonManifest -) -> Generator[Tuple[str, AddonManifest], None, None]: - """ Extract data from translated manifest.ini from *.nvda-addon and parse. + manifest: AddonManifest, +) -> Generator[tuple[str, AddonManifest], None, None]: + """Extract data from translated manifest.ini from *.nvda-addon and parse. Raise on error. """ if manifest.filename is None: diff --git a/_validate/regenerateTranslations.py b/_validate/regenerateTranslations.py index 9ead97b..0f015b7 100644 --- a/_validate/regenerateTranslations.py +++ b/_validate/regenerateTranslations.py @@ -1,27 +1,16 @@ -#!/usr/bin/env python - -# Copyright (C) 2023 NV Access Limited +# Copyright (C) 2023-2025 NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. 
# For more details see: https://www.gnu.org/licenses/gpl-2.0.html import argparse import glob import json -import os -import sys from urllib.request import urlretrieve -from typing import ( - Optional, -) - -sys.path.append(os.path.dirname(__file__)) # To allow this module to be run as a script by runcreatejson.bat -# E402 module level import not at top of file -from manifestLoader import getAddonManifest, getAddonManifestLocalizations # noqa:E402 -del sys.path[-1] +from .manifestLoader import getAddonManifest, getAddonManifestLocalizations -def regenerateJsonFile(filePath: str, errorFilePath: Optional[str]) -> None: +def regenerateJsonFile(filePath: str, errorFilePath: str | None) -> None: with open(filePath, encoding="utf-8") as f: addonData = json.load(f) if addonData.get("legacy"): @@ -41,9 +30,9 @@ def regenerateJsonFile(filePath: str, errorFilePath: Optional[str]) -> None: "language": langCode, "displayName": manifest["summary"], "description": manifest["description"], - } + }, ) - + with open(filePath, "wt", encoding="utf-8") as f: json.dump(addonData, f, indent="\t", ensure_ascii=False) print(f"Wrote json file: {filePath}") @@ -64,10 +53,10 @@ def main(): default=None, ) args = parser.parse_args() - errorFilePath: Optional[str] = args.errorOutputFile + errorFilePath: str | None = args.errorOutputFile for addonJsonFile in glob.glob(f"{args.parentDir}/**/*.json"): regenerateJsonFile(addonJsonFile, errorFilePath) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/_validate/sha256.py b/_validate/sha256.py index 4455d90..c3e120c 100644 --- a/_validate/sha256.py +++ b/_validate/sha256.py @@ -1,11 +1,9 @@ -#!/usr/bin/env python - -# Copyright (C) 2020 NV Access Limited +# Copyright (C) 2020-2025 NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. # For more details see: https://www.gnu.org/licenses/gpl-2.0.html -import hashlib import argparse +import hashlib import typing #: The read size for each chunk read from the file, prevents memory overuse with large files. @@ -19,9 +17,9 @@ def sha256_checksum(binaryReadModeFile: typing.BinaryIO, blockSize: int = BLOCK_ :return: The Sha256 hex digest. """ sha256 = hashlib.sha256() - assert binaryReadModeFile.readable() and binaryReadModeFile.mode == 'rb' + assert binaryReadModeFile.readable() and binaryReadModeFile.mode == "rb" f = binaryReadModeFile - for block in iter(lambda: f.read(blockSize), b''): + for block in iter(lambda: f.read(blockSize), b""): sha256.update(block) return sha256.hexdigest() @@ -29,14 +27,14 @@ def sha256_checksum(binaryReadModeFile: typing.BinaryIO, blockSize: int = BLOCK_ def main(): parser = argparse.ArgumentParser() parser.add_argument( - type=argparse.FileType('rb'), + type=argparse.FileType("rb"), dest="file", - help="The NVDA addon (*.nvda-addon) to use when computing the sha256." + help="The NVDA addon (*.nvda-addon) to use when computing the sha256.", ) args = parser.parse_args() checksum = sha256_checksum(args.file) print(f"Sha256:\t {checksum}") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/_validate/validate.py b/_validate/validate.py index 17cd2d7..28402d8 100644 --- a/_validate/validate.py +++ b/_validate/validate.py @@ -1,38 +1,26 @@ -#!/usr/bin/env python - -# Copyright (C) 2021-2023 Noelia Ruiz Martínez, NV Access Limited +# Copyright (C) 2021-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. 
# For more details see: https://www.gnu.org/licenses/gpl-2.0.html import argparse +from collections.abc import Generator from glob import glob import json import os import re -import sys -from typing import ( - Any, - Dict, - Generator, - Iterable, - List, - Optional, -) +from typing import Any, cast import urllib.request from jsonschema import validate, exceptions -sys.path.append(os.path.dirname(__file__)) # To allow this module to be run as a script by runValidate.bat -# E402 module level import not at top of file -import sha256 # noqa:E402 -from addonManifest import AddonManifest # noqa:E402 -from manifestLoader import getAddonManifest, TEMP_DIR # noqa:E402 -from majorMinorPatch import MajorMinorPatch # noqa:E402 -del sys.path[-1] +from .addonManifest import AddonManifest, ApiVersionT +from .manifestLoader import getAddonManifest, TEMP_DIR +from .majorMinorPatch import MajorMinorPatch +from .sha256 import sha256_checksum JSON_SCHEMA = os.path.join(os.path.dirname(__file__), "addonVersion_schema.json") -JsonObjT = Dict[str, Any] +JsonObjT = dict[str, Any] ValidationErrorGenerator = Generator[str, None, None] @@ -44,23 +32,21 @@ def getAddonMetadata(filename: str) -> JsonObjT: """ with open(filename, encoding="utf-8") as f: data: JsonObjT = json.load(f) - _validateJson(data) + validateJson(data) return data -def getExistingVersions(verFilename: str) -> List[str]: - """Loads API versions file and returns list of versions formatted as strings. - """ +def getExistingVersions(verFilename: str) -> list[str]: + """Loads API versions file and returns list of versions formatted as strings.""" with open(verFilename, encoding="utf-8") as f: - data: List[JsonObjT] = json.load(f) + data: list[JsonObjT] = json.load(f) return [_formatVersionString(version["apiVer"].values()) for version in data] -def getExistingStableVersions(verFilename: str) -> List[str]: - """Loads API versions file and returns list of stable versions formatted as strings. - """ +def getExistingStableVersions(verFilename: str) -> list[str]: + """Loads API versions file and returns list of stable versions formatted as strings.""" with open(verFilename, encoding="utf-8") as f: - data: List[JsonObjT] = json.load(f) + data: list[JsonObjT] = json.load(f) return [ _formatVersionString(version["apiVer"].values()) for version in data @@ -68,8 +54,8 @@ def getExistingStableVersions(verFilename: str) -> List[str]: ] -def _validateJson(data: JsonObjT) -> None: - """ Ensure that the loaded metadata conforms to the schema. +def validateJson(data: JsonObjT) -> None: + """Ensure that the loaded metadata conforms to the schema. Raise error if not """ with open(JSON_SCHEMA, encoding="utf-8") as f: @@ -123,15 +109,14 @@ def checkSha256(addonPath: str, expectedSha: str) -> ValidationErrorGenerator: Return an error if it does not match the expected. """ with open(addonPath, "rb") as f: - sha256Addon = sha256.sha256_checksum(f) + sha256Addon = sha256_checksum(f) if sha256Addon.upper() != expectedSha.upper(): yield f"Sha256 of .nvda-addon at URL is: {sha256Addon}" def checkSummaryMatchesDisplayName(manifest: AddonManifest, submission: JsonObjT) -> ValidationErrorGenerator: - """ The submission Name must match the *.nvda-addon manifest summary field. - """ - summary = manifest["summary"] + """The submission Name must match the *.nvda-addon manifest summary field.""" + summary = cast(str, manifest["summary"]) if summary != submission["displayName"]: yield ( f"Submission 'displayName' must be set to '{summary}' in json file." 
@@ -140,8 +125,8 @@ def checkSummaryMatchesDisplayName(manifest: AddonManifest, submission: JsonObjT def checkDescriptionMatches(manifest: AddonManifest, submission: JsonObjT) -> ValidationErrorGenerator: - """ The submission description must match the *.nvda-addon manifest description field.""" - description = manifest["description"] + """The submission description must match the *.nvda-addon manifest description field.""" + description = cast(str, manifest["description"]) if description != submission["description"]: yield ( f"Submission 'description' must be set to '{description}' in json file." @@ -150,32 +135,29 @@ def checkDescriptionMatches(manifest: AddonManifest, submission: JsonObjT) -> Va def checkUrlMatchesHomepage(manifest: AddonManifest, submission: JsonObjT) -> ValidationErrorGenerator: - """ The submission homepage must match the *.nvda-addon manifest url field. - """ - manifestUrl = manifest.get("url") - if manifestUrl == 'None': + """The submission homepage must match the *.nvda-addon manifest url field.""" + manifestUrl = manifest.get("url") # type: ignore[reportUnknownMemberType] + if manifestUrl == "None": # The config default is None which is parsed by configobj as a string not a NoneType manifestUrl = None if manifestUrl != submission.get("homepage"): - yield f"Submission 'homepage' must be set to '{manifest.get('url')}' " \ - f"in json file instead of {submission.get('homepage')}" + yield ( + f"Submission 'homepage' must be set to '{manifest.get('url')}' " # type: ignore[reportUnknownMemberType] + f"in json file instead of {submission.get('homepage')}" + ) def checkAddonId( - manifest: AddonManifest, - submissionFilePath: str, - submission: JsonObjT, + manifest: AddonManifest, + submissionFilePath: str, + submission: JsonObjT, ) -> ValidationErrorGenerator: - """ The submitted json file must be placed in a folder matching the *.nvda-addon manifest name field. 
- """ - expectedName = manifest["name"] + """The submitted json file must be placed in a folder matching the *.nvda-addon manifest name field.""" + expectedName = cast(str, manifest["name"]) idInPath = os.path.basename(os.path.dirname(submissionFilePath)) if expectedName != idInPath: - yield ( - "Submitted json file must be placed in a folder matching" - f" the addonId/name '{expectedName}'" - ) - if expectedName != submission['addonId']: + yield (f"Submitted json file must be placed in a folder matching the addonId/name '{expectedName}'") + if expectedName != submission["addonId"]: yield ( "Submission data 'addonId' field does not match 'name' field in addon manifest:" f" {expectedName} vs {submission['addonId']}" @@ -192,14 +174,13 @@ def checkAddonId( VERSION_PARSE = re.compile(r"^(\d+)(?:$|(?:\.(\d+)$)|(?:\.(\d+)\.(\d+)$))") -def parseVersionStr(ver: str) -> Dict[str, int]: - +def parseVersionStr(ver: str) -> dict[str, int]: matches = VERSION_PARSE.match(ver) if not matches: return { "major": 0, "minor": 0, - "patch": 0 + "patch": 0, } groups = list(x for x in matches.groups() if x) @@ -207,27 +188,23 @@ def parseVersionStr(ver: str) -> Dict[str, int]: version = { "major": int(groups[0]), "minor": int(groups[1]), - "patch": int(groups[2]) + "patch": int(groups[2]), } return version -def _formatVersionString(versionValues: Iterable) -> str: - versionValues = list(versionValues) - assert 1 < len(versionValues) < 4 - return ".".join( - str(x) for x in versionValues - ) +def _formatVersionString(versionValues: ApiVersionT) -> str: + return ".".join(str(x) for x in versionValues) def checkSubmissionFilenameMatchesVersionNumber( - submissionFilePath: str, - submission: JsonObjT, + submissionFilePath: str, + submission: JsonObjT, ) -> ValidationErrorGenerator: versionFromPath: str = os.path.splitext(os.path.basename(submissionFilePath))[0] - versionNumber: JsonObjT = submission['addonVersionNumber'] - formattedVersionNumber = _formatVersionString(versionNumber.values()) + versionNumber: dict[str, int] = submission["addonVersionNumber"] + formattedVersionNumber = _formatVersionString(cast(ApiVersionT, tuple(versionNumber.values()))) if versionFromPath != formattedVersionNumber: # yield f"Submitted json file should be named '{formattedVersionNumber}.json'" yield ( @@ -238,24 +215,22 @@ def checkSubmissionFilenameMatchesVersionNumber( ) -def checkParsedVersionNameMatchesVersionNumber( - submission: JsonObjT -) -> ValidationErrorGenerator: - versionNumber: JsonObjT = submission['addonVersionNumber'] - versionName: str = submission['addonVersionName'] +def checkParsedVersionNameMatchesVersionNumber(submission: JsonObjT) -> ValidationErrorGenerator: + versionNumber: dict[str, int] = submission["addonVersionNumber"] + versionName: str = submission["addonVersionName"] parsedVersion = parseVersionStr(versionName) if parsedVersion != versionNumber: yield ( "Warning: submission data 'addonVersionName' and 'addonVersionNumber' mismatch." 
- f" Unable to parse: {versionName} and match with {_formatVersionString(versionNumber.values())}" + f" Unable to parse: {versionName} and match with {_formatVersionString(cast(ApiVersionT, tuple(versionNumber.values())))}" ) def checkManifestVersionMatchesVersionName( - manifest: AddonManifest, - submission: JsonObjT + manifest: AddonManifest, + submission: JsonObjT, ) -> ValidationErrorGenerator: - manifestVersion: str = manifest["version"] + manifestVersion: str = cast(str, manifest["version"]) addonVersionName: str = submission["addonVersionName"] if manifestVersion != addonVersionName: yield ( @@ -264,11 +239,8 @@ def checkManifestVersionMatchesVersionName( ) -def checkMinNVDAVersionMatches( - manifest: AddonManifest, - submission: JsonObjT -) -> ValidationErrorGenerator: - manifestMinimumNVDAVersion = MajorMinorPatch(*manifest["minimumNVDAVersion"]) +def checkMinNVDAVersionMatches(manifest: AddonManifest, submission: JsonObjT) -> ValidationErrorGenerator: + manifestMinimumNVDAVersion = MajorMinorPatch(*cast(ApiVersionT, manifest["minimumNVDAVersion"])) minNVDAVersion = MajorMinorPatch(**submission["minNVDAVersion"]) if manifestMinimumNVDAVersion != minNVDAVersion: yield ( @@ -278,10 +250,10 @@ def checkMinNVDAVersionMatches( def checkLastTestedNVDAVersionMatches( - manifest: AddonManifest, - submission: JsonObjT + manifest: AddonManifest, + submission: JsonObjT, ) -> ValidationErrorGenerator: - manifestLastTestedNVDAVersion = MajorMinorPatch(*manifest["lastTestedNVDAVersion"]) + manifestLastTestedNVDAVersion = MajorMinorPatch(*cast(ApiVersionT, manifest["lastTestedNVDAVersion"])) lastTestedVersion = MajorMinorPatch(**submission["lastTestedVersion"]) if manifestLastTestedNVDAVersion != lastTestedVersion: yield ( @@ -291,44 +263,42 @@ def checkLastTestedNVDAVersionMatches( def checkLastTestedVersionExist(submission: JsonObjT, verFilename: str) -> ValidationErrorGenerator: - lastTestedVersion: JsonObjT = submission['lastTestedVersion'] - formattedLastTestedVersion: str = _formatVersionString(lastTestedVersion.values()) + lastTestedVersion: dict[str, int] = submission["lastTestedVersion"] + formattedLastTestedVersion: str = _formatVersionString(cast(ApiVersionT, lastTestedVersion.values())) if formattedLastTestedVersion not in getExistingVersions(verFilename): yield f"Last tested version error: {formattedLastTestedVersion} doesn't exist" - elif ( - submission["channel"] == "stable" - and formattedLastTestedVersion not in getExistingStableVersions(verFilename) + elif submission["channel"] == "stable" and formattedLastTestedVersion not in getExistingStableVersions( + verFilename, ): - yield f"Last tested version error: {formattedLastTestedVersion} is not stable yet. " + \ - "Please submit add-on using the beta or dev channel." + yield ( + f"Last tested version error: {formattedLastTestedVersion} is not stable yet. " + + "Please submit add-on using the beta or dev channel." 
+ ) def checkMinRequiredVersionExist(submission: JsonObjT, verFilename: str) -> ValidationErrorGenerator: - minRequiredVersion: JsonObjT = submission["minNVDAVersion"] - formattedMinRequiredVersion: str = _formatVersionString(minRequiredVersion.values()) + minRequiredVersion: dict[str, int] = submission["minNVDAVersion"] + formattedMinRequiredVersion: str = _formatVersionString(cast(ApiVersionT, minRequiredVersion.values())) if formattedMinRequiredVersion not in getExistingVersions(verFilename): yield f"Minimum required version error: {formattedMinRequiredVersion} doesn't exist" - elif ( - submission["channel"] == "stable" - and formattedMinRequiredVersion not in getExistingStableVersions(verFilename) + elif submission["channel"] == "stable" and formattedMinRequiredVersion not in getExistingStableVersions( + verFilename, ): - yield f"Minimum required version error: {formattedMinRequiredVersion} is not stable yet. " + \ - "Please submit add-on using the beta or dev channel." + yield ( + f"Minimum required version error: {formattedMinRequiredVersion} is not stable yet. " + + "Please submit add-on using the beta or dev channel." + ) def checkVersions( - manifest: AddonManifest, - submissionFilePath: str, - submission: JsonObjT + manifest: AddonManifest, + submissionFilePath: str, + submission: JsonObjT, ) -> ValidationErrorGenerator: - """Check submitted json file name matches the *.nvda-addon manifest name field. - """ - yield from checkSubmissionFilenameMatchesVersionNumber( - submissionFilePath, - submission - ) + """Check submitted json file name matches the *.nvda-addon manifest name field.""" + yield from checkSubmissionFilenameMatchesVersionNumber(submissionFilePath, submission) yield from checkManifestVersionMatchesVersionName(manifest, submission) yield from checkParsedVersionNameMatchesVersionNumber(submission) @@ -372,7 +342,7 @@ def validateSubmission(submissionFilePath: str, verFilename: str) -> ValidationE yield f"Fatal error, unable to continue: {e}" -def outputErrors(addonFileName: str, errors: List[str], errorFilePath: Optional[str] = None): +def outputErrors(addonFileName: str, errors: list[str], errorFilePath: str | None = None): if len(errors) > 0: print("\r\n".join(errors)) if errorFilePath: @@ -386,15 +356,15 @@ def main(): "--dry-run", action="store_true", default=False, - help="Ensures the correct arguments are passed, doesn't run checks, exists with success." + help="Ensures the correct arguments are passed, doesn't run checks, exists with success.", ) parser.add_argument( dest="filePathGlob", - help="The json (.json) files containing add-on metadata. e.g. addons/*/*.json." + help="The json (.json) files containing add-on metadata. e.g. addons/*/*.json.", ) parser.add_argument( dest="APIVersions", - help="The JSON file containing valid NVDA API versions." 
+ help="The JSON file containing valid NVDA API versions.", ) parser.add_argument( "--output", @@ -404,7 +374,7 @@ def main(): ) args = parser.parse_args() - addonFiles: List[str] = glob(args.filePathGlob) + addonFiles: list[str] = glob(args.filePathGlob) verFilename: str = args.APIVersions errorOutputFile: str = args.errorOutputFile if errorOutputFile and os.path.exists(errorOutputFile): @@ -425,5 +395,5 @@ def main(): print(f"No validation errors for {args.filePathGlob}") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..5b548ff --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,118 @@ +[build-system] +requires = ["setuptools~=72.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "addon-datastore-validation" +dynamic = ["version"] +description = "Add-on datastore validation" +maintainers = [ + {name = "NV Access", email = "info@nvaccess.org"}, +] +requires-python = ">=3.13.0, <3.14" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU General Public License v3", + "Programming Language :: Python :: 3", + "Topic :: Accessibility", +] +readme = "readme.md" +license = {file = "LICENSE"} +dependencies = [ + "configobj", + "jsonschema==4.25.1", +] + +[project.urls] +Homepage = "https://www.nvaccess.org/" +Repository = "https://github.com/nvaccess/addon-datastore-validation.git" +Issues = "https://github.com/nvaccess/addon-datastore-validation/issues" + +[tool.pyright] +venvPath = ".venv" +venv = "." +pythonPlatform = "Windows" +typeCheckingMode = "strict" + +include = [ + "**/*.py", +] + +exclude = [ + ".git", + "__pycache__", + ".venv", +] + +# While exclude tells pyright not to scan files in the first instance, +# it will still analyse files included by other files. +ignore = [ + # We do not care about errors in our dependencies. 
+ ".venv", +] + +# General config +analyzeUnannotatedFunctions = true +deprecateTypingAliases = true + +# Stricter typing +strictParameterNoneValue = true +strictListInference = true +strictDictionaryInference = true +strictSetInference = true + +# ignore configobj +reportMissingTypeStubs = false + +[tool.uv] +default-groups = "all" +python-preference = "only-system" +environments = ["sys_platform == 'win32'"] +required-version = ">=0.8" + +[tool.setuptools] +package-dir = {"" = "_validate"} + +[tool.uv.sources] +configobj = { git = "https://github.com/DiffSK/configobj", rev = "8be54629ee7c26acb5c865b74c76284e80f3aa31" } + +[dependency-groups] +lint = [ + "ruff==0.13.0", + "pre-commit==4.3.0", + "pyright==1.1.405", +] + +unit-tests = [ + # Creating XML unit test reports + "unittest-xml-reporting==3.2.0", +] + +[tool.ruff] +line-length = 110 + +include = [ + "*.py", +] + +exclude = [ + ".git", + "__pycache__", + "build", + "output", + ".venv", +] + +[tool.ruff.format] +indent-style = "tab" +line-ending = "lf" + +[tool.ruff.lint.mccabe] +max-complexity = 15 + +[tool.ruff.lint] +ignore = [ + # indentation contains tabs + "W191", +] diff --git a/regenerateTranslations.bat b/regenerateTranslations.bat new file mode 100644 index 0000000..eefd38b --- /dev/null +++ b/regenerateTranslations.bat @@ -0,0 +1,10 @@ +@echo off +REM Regenerate translations for files in dir +set hereOrig=%~dp0 +set here=%hereOrig% +if #%hereOrig:~-1%# == #\# set here=%hereOrig:~0,-1% +set unitTestsPath=%here%\tests +set testOutput=%here%\testOutput +md %testOutput% + +call uv run --directory "%here%" python -m _validate.regenerateTranslations %* diff --git a/regenerateTranslations.ps1 b/regenerateTranslations.ps1 deleted file mode 100644 index 9531ea7..0000000 --- a/regenerateTranslations.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -# Regenerate translations for files in dir -$ErrorActionPreference = "Stop"; -& "$PSScriptRoot\venvUtils\venvCmd" "$PSScriptRoot\_validate\regenerateTranslations.py" $args diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 6d0cea5..0000000 --- a/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -# Dependencies -flake8==3.9.2 -# flake8-tabs version 2.3.2 gives spurious errors: -# "ET113 (flake8-tabs) use of alignment as indentation, but option continuation-style=hanging does not permit this" -flake8-tabs==2.2.2 - -# Requirements for validate -configobj @ git+https://github.com/DiffSK/configobj@8be54629ee7c26acb5c865b74c76284e80f3aa31#egg=configobj -jsonschema==4.23.0 - diff --git a/runcreatejson.bat b/runcreatejson.bat new file mode 100644 index 0000000..c6b2f37 --- /dev/null +++ b/runcreatejson.bat @@ -0,0 +1,10 @@ +@echo off +REM create json from manifest +set hereOrig=%~dp0 +set here=%hereOrig% +if #%hereOrig:~-1%# == #\# set here=%hereOrig:~0,-1% +set unitTestsPath=%here%\tests +set testOutput=%here%\testOutput +md %testOutput% + +call uv run --directory "%here%" python -m _validate.createJson %* diff --git a/runcreatejson.ps1 b/runcreatejson.ps1 deleted file mode 100644 index 332d748..0000000 --- a/runcreatejson.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -# create json from manifest -$ErrorActionPreference = "Stop"; -& "$PSScriptRoot\venvUtils\venvCmd" "$PSScriptRoot\_validate\createJson.py" $args diff --git a/runlint.bat b/runlint.bat new file mode 100644 index 0000000..d9f0646 --- /dev/null +++ b/runlint.bat @@ -0,0 +1,20 @@ +@echo off +rem runlint [] +rem Lints and formats all python files +set hereOrig=%~dp0 +set here=%hereOrig% +if #%hereOrig:~-1%# == #\# set 
here=%hereOrig:~0,-1% + +set ruffCheckArgs= +set ruffFormatArgs= +if "%1" NEQ "" set ruffCheckArgs=--output-file=%1/PR-lint.xml --output-format=junit +if "%1" NEQ "" set ruffFormatArgs=--diff +call uv run --group lint --directory "%here%" ruff check --fix %ruffCheckArgs% +if ERRORLEVEL 1 exit /b %ERRORLEVEL% +if "%1" NEQ "" ( + call uv run --group lint --directory "%here%" ruff format %ruffFormatArgs% > %1/lint-diff.diff +) else ( + call uv run --group lint --directory "%here%" ruff format %ruffFormatArgs% +) +call uv run --group lint --directory "%here%" pyright --threads --level warning +if ERRORLEVEL 1 exit /b %ERRORLEVEL% diff --git a/runlint.ps1 b/runlint.ps1 deleted file mode 100644 index 0725137..0000000 --- a/runlint.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -# lint Python files -$ErrorActionPreference = "Stop"; -& "$PSScriptRoot\venvUtils\venvCmd" -m flake8 _validate _tests --count --show-source --statistics --tee diff --git a/runsha.bat b/runsha.bat new file mode 100644 index 0000000..4e9ed05 --- /dev/null +++ b/runsha.bat @@ -0,0 +1,10 @@ +@echo off +REM print sha256 of a Python file +set hereOrig=%~dp0 +set here=%hereOrig% +if #%hereOrig:~-1%# == #\# set here=%hereOrig:~0,-1% +set unitTestsPath=%here%\tests +set testOutput=%here%\testOutput +md %testOutput% + +call uv run --directory "%here%" python -m _validate.sha256 %* diff --git a/runsha.ps1 b/runsha.ps1 deleted file mode 100644 index 6e4604a..0000000 --- a/runsha.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -# print sha256 of a Python file -$ErrorActionPreference = "Stop"; -& "$PSScriptRoot\venvUtils\venvCmd" "$PSScriptRoot\_validate\sha256.py" $args diff --git a/rununittests.bat b/rununittests.bat new file mode 100644 index 0000000..dba069a --- /dev/null +++ b/rununittests.bat @@ -0,0 +1,9 @@ +@echo off +set hereOrig=%~dp0 +set here=%hereOrig% +if #%hereOrig:~-1%# == #\# set here=%hereOrig:~0,-1% +set unitTestsPath=%here%\tests +set testOutput=%here%\testOutput +md %testOutput% + +call uv run --group unit-tests --directory "%here%" -m xmlrunner discover -b -s "%unitTestsPath%" -t "%here%" --output-file "%testOutput%\unitTests.xml" %* diff --git a/runvalidate.bat b/runvalidate.bat new file mode 100644 index 0000000..a162640 --- /dev/null +++ b/runvalidate.bat @@ -0,0 +1,10 @@ +@echo off +REM validate +set hereOrig=%~dp0 +set here=%hereOrig% +if #%hereOrig:~-1%# == #\# set here=%hereOrig:~0,-1% +set unitTestsPath=%here%\tests +set testOutput=%here%\testOutput +md %testOutput% + +call uv run --directory "%here%" python -m _validate.validate %* diff --git a/runvalidate.ps1 b/runvalidate.ps1 deleted file mode 100644 index 58ed217..0000000 --- a/runvalidate.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -# validate -$ErrorActionPreference = "Stop"; -& "$PSScriptRoot\venvUtils\venvCmd" "$PSScriptRoot\_validate\validate.py" $args diff --git a/_tests/__init__.py b/tests/__init__.py similarity index 100% rename from _tests/__init__.py rename to tests/__init__.py diff --git a/_tests/testData/addons/fake/13.0.0.json b/tests/testData/addons/fake/13.0.0.json similarity index 100% rename from _tests/testData/addons/fake/13.0.0.json rename to tests/testData/addons/fake/13.0.0.json diff --git a/_tests/testData/fake.nvda-addon b/tests/testData/fake.nvda-addon similarity index 100% rename from _tests/testData/fake.nvda-addon rename to tests/testData/fake.nvda-addon diff --git a/_tests/testData/manifest.ini b/tests/testData/manifest.ini similarity index 100% rename from _tests/testData/manifest.ini rename to tests/testData/manifest.ini diff --git 
a/_tests/testData/nvdaAPIVersions.json b/tests/testData/nvdaAPIVersions.json similarity index 100% rename from _tests/testData/nvdaAPIVersions.json rename to tests/testData/nvdaAPIVersions.json diff --git a/_tests/test_createJson.py b/tests/test_createJson.py similarity index 72% rename from _tests/test_createJson.py rename to tests/test_createJson.py index ddfd83d..6241bd7 100644 --- a/_tests/test_createJson.py +++ b/tests/test_createJson.py @@ -1,6 +1,4 @@ -#!/usr/bin/env python - -# Copyright (C) 2022-2024 Noelia Ruiz Martínez, NV Access Limited +# Copyright (C) 2022-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. # For more details see: https://www.gnu.org/licenses/gpl-2.0.html @@ -8,28 +6,24 @@ import os import shutil import json -from _validate import ( - createJson, - addonManifest, - manifestLoader -) +from _validate import createJson, addonManifest, manifestLoader TOP_DIR = os.path.abspath(os.path.dirname(__file__)) SOURCE_DIR = os.path.dirname(TOP_DIR) -INPUT_DATA_PATH = os.path.join(SOURCE_DIR, '_tests', 'testData') +INPUT_DATA_PATH = os.path.join(SOURCE_DIR, "tests", "testData") VALID_JSON = os.path.join( INPUT_DATA_PATH, "addons", "fake", - "13.0.0.json" + "13.0.0.json", ) # json file available in testData/fake -ADDON_PACKAGE = os.path.join(INPUT_DATA_PATH, 'fake.nvda-addon') -MANIFEST_FILE = os.path.join(INPUT_DATA_PATH, 'manifest.ini') +ADDON_PACKAGE = os.path.join(INPUT_DATA_PATH, "fake.nvda-addon") +MANIFEST_FILE = os.path.join(INPUT_DATA_PATH, "manifest.ini") ADDON_CHANNEL = "testChannel" ADDON_PUBLISHER = "testPublisher" ADDON_SOURCE_URL = "https://example.com/" -OUTPUT_DATA_PATH = os.path.join(SOURCE_DIR, '_tests', 'testOutput') +OUTPUT_DATA_PATH = os.path.join(SOURCE_DIR, "tests", "testOutput") def getAddonManifest(): @@ -39,10 +33,11 @@ def getAddonManifest(): class IntegrationTestCreateJson(unittest.TestCase): - """ Integration tests. + """Integration tests. - The JSON file is created (written to the filesystem). - The output is then loaded and checked for correctness. """ + def setUp(self): self.outputDir = os.path.join(OUTPUT_DATA_PATH, "createJsonOutput") self.maxDiff = None # Permit unittest.TestCase (base class) to calculate diffs of any lengths. @@ -51,8 +46,8 @@ def setUp(self): def test_contentsMatchesExampleFile(self): # Values used must match the manifest files: - # - '_tests / testData / manifest.ini' - # - '_tests/testData/fake.nvda-addon' (unzip) + # - 'tests / testData / manifest.ini' + # - 'tests/testData/fake.nvda-addon' (unzip) manifest = getAddonManifest() createJson.generateJsonFile( manifest, @@ -66,14 +61,10 @@ def test_contentsMatchesExampleFile(self): licenseUrl="https://www.gnu.org/licenses/gpl-2.0.html", ) actualJsonPath = os.path.join(self.outputDir, "fake", "13.0.0.json") - self.assertTrue( - os.path.isfile(actualJsonPath), - f"Failed to create json file: {actualJsonPath}" - ) + self.assertTrue(os.path.isfile(actualJsonPath), f"Failed to create json file: {actualJsonPath}") self._assertJsonFilesEqual(actualJsonPath=actualJsonPath, expectedJsonPath=VALID_JSON) def _assertJsonFilesEqual(self, actualJsonPath: str, expectedJsonPath: str): - # Not equal, how are they different? 
with open(VALID_JSON, encoding="utf-8") as expectedFile: expectedJson = json.load(expectedFile) @@ -93,15 +84,26 @@ def setUp(self) -> None: def test_validVersion(self): outputFilePath = createJson.buildOutputFilePath( - data={ - "addonId": "testId", - "addonVersionNumber": { - "major": 1, - "minor": 2, - "patch": 0, - } - }, - parentDir=self.outputDir + data=createJson.AddonData( + addonId="addonId", + displayName="Addon name", + URL="https://example.com", + description="Addon description", + sha256="sha256checksum", + addonVersionName="1.2.0", + addonVersionNumber={"major": 1, "minor": 2, "patch": 0}, + minNVDAVersion={"major": 2023, "minor": 1, "patch": 0}, + lastTestedVersion={"major": 2023, "minor": 2, "patch": 0}, + channel="stable", + publisher="Name of addon author or organisation", + sourceURL="https://example.com", + license="GPL v2", + homepage="https://example.com", + licenseURL="https://www.gnu.org/licenses/gpl-2.0.html", + submissionTime=createJson.getCurrentTime(), + translations=[], + ), + parentDir=self.outputDir, ) dir, filename = os.path.split(outputFilePath) @@ -109,7 +111,7 @@ def test_validVersion(self): self.assertEqual( filename, "1.2.0.json", - msg="Name of the output file should be named based on version number" + msg="Name of the output file should be named based on version number", ) diff --git a/_tests/test_majorMinorPatch.py b/tests/test_majorMinorPatch.py similarity index 80% rename from _tests/test_majorMinorPatch.py rename to tests/test_majorMinorPatch.py index 11a0ac8..5f092b1 100644 --- a/_tests/test_majorMinorPatch.py +++ b/tests/test_majorMinorPatch.py @@ -1,4 +1,4 @@ -# Copyright (C) 2022-2023 Noelia Ruiz Martínez, NV Access Limited +# Copyright (C) 2022-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. # For more details see: https://www.gnu.org/licenses/gpl-2.0.html @@ -9,16 +9,14 @@ class Test_getVersionNumber(unittest.TestCase): def test_tripleDigitVersion_isValid(self): - """ Canonical version (major, minor, patch) expected. - """ + """Canonical version (major, minor, patch) expected.""" versionNumber = MajorMinorPatch.getFromStr("1.2.3") self.assertEqual(versionNumber.major, 1) self.assertEqual(versionNumber.minor, 2) self.assertEqual(versionNumber.patch, 3) def test_doubleDigitVersion_isValid(self): - """patch is optional, assumed to be zero. - """ + """patch is optional, assumed to be zero.""" versionNumber = MajorMinorPatch.getFromStr("1.02") self.assertEqual(versionNumber.major, 1) self.assertEqual(versionNumber.minor, 2) @@ -33,7 +31,5 @@ def test_tooManyValues_raises(self): MajorMinorPatch.getFromStr("1.2.3.4") def test_versionWithNonDigit(self): - with self.assertRaises( - ValueError, - msg="Non-digit chars in version number expected as an error."): + with self.assertRaises(ValueError, msg="Non-digit chars in version number expected as an error."): MajorMinorPatch.getFromStr("1.2.3a") diff --git a/_tests/test_validate.py b/tests/test_validate.py similarity index 60% rename from _tests/test_validate.py rename to tests/test_validate.py index 59ddf73..0c59e4e 100644 --- a/_tests/test_validate.py +++ b/tests/test_validate.py @@ -1,11 +1,11 @@ -#!/usr/bin/env python - -# Copyright (C) 2021 Noelia Ruiz Martínez, NV Access Limited +# Copyright (C) 2021-2025 Noelia Ruiz Martínez, NV Access Limited # This file may be used under the terms of the GNU General Public License, version 2 or later. 
# For more details see: https://www.gnu.org/licenses/gpl-2.0.html + +from collections.abc import Callable from dataclasses import dataclass import unittest -from unittest.mock import patch +from unittest.mock import NonCallableMock, patch import os import json from jsonschema import exceptions @@ -17,12 +17,12 @@ JSON_SCHEMA = validate.JSON_SCHEMA TOP_DIR = os.path.abspath(os.path.dirname(__file__)) SOURCE_DIR = os.path.dirname(TOP_DIR) -TEST_DATA_PATH = os.path.join(SOURCE_DIR, '_tests', 'testData') -ADDON_PACKAGE = os.path.join(TEST_DATA_PATH, f'{VALID_ADDON_ID}.nvda-addon') -ADDON_SUBMISSIONS_DIR = os.path.join(TEST_DATA_PATH, 'addons') -VALID_SUBMISSION_JSON_FILE = os.path.join(ADDON_SUBMISSIONS_DIR, VALID_ADDON_ID, '13.0.0.json') -MANIFEST_FILE = os.path.join(TEST_DATA_PATH, 'manifest.ini') -VERSIONS_FILE = os.path.join(TEST_DATA_PATH, 'nvdaAPIVersions.json') +TEST_DATA_PATH = os.path.join(SOURCE_DIR, "tests", "testData") +ADDON_PACKAGE = os.path.join(TEST_DATA_PATH, f"{VALID_ADDON_ID}.nvda-addon") +ADDON_SUBMISSIONS_DIR = os.path.join(TEST_DATA_PATH, "addons") +VALID_SUBMISSION_JSON_FILE = os.path.join(ADDON_SUBMISSIONS_DIR, VALID_ADDON_ID, "13.0.0.json") +MANIFEST_FILE = os.path.join(TEST_DATA_PATH, "manifest.ini") +VERSIONS_FILE = os.path.join(TEST_DATA_PATH, "nvdaAPIVersions.json") def getValidAddonSubmission() -> validate.JsonObjT: @@ -40,101 +40,72 @@ def getAddonManifest(): class Validate_general(unittest.TestCase): def setUp(self): self.submissionData = getValidAddonSubmission() - self.manifest = getAddonManifest() - - def tearDown(self): - self.submissionData = None - self.manifest = None + self.manifest: addonManifest.AddonManifest = getAddonManifest() def test_validateJson_SchemaNonConformance_Raises(self): self.submissionData["description"] = 3 # should be a string with self.assertRaises(exceptions.ValidationError): - validate._validateJson(self.submissionData) + validate.validateJson(self.submissionData) class Validate_checkDownloadUrlFormat(unittest.TestCase): - """Tests for the checkDownloadUrlFormat function - """ + """Tests for the checkDownloadUrlFormat function""" + def test_validExampleURL(self): url = ( "https://github.com/nvdaes/clipContentsDesigner/releases/download/13.0/" "clipContentsDesigner-13.0.nvda-addon" ) - errors = list( - validate.checkDownloadUrlFormat(url) - ) + errors = list(validate.checkDownloadUrlFormat(url)) self.assertEqual(errors, []) def test_minimalRequirementsURL(self): url = "https://something.nvda-addon" - errors = list( - validate.checkDownloadUrlFormat(url) - ) + errors = list(validate.checkDownloadUrlFormat(url)) self.assertEqual(errors, []) def test_missingHTTPS(self): url = "http://something.nvda-addon" - errors = list( - validate.checkDownloadUrlFormat(url) - ) - self.assertEqual( - errors, - ["Add-on download url must start with https://"] - ) + errors = list(validate.checkDownloadUrlFormat(url)) + self.assertEqual(errors, ["Add-on download url must start with https://"]) def test_missingExt(self): url = "https://example.com" - errors = list( - validate.checkDownloadUrlFormat(url) - ) - self.assertEqual( - errors, - ["Add-on download url must end with .nvda-addon"] - ) + errors = list(validate.checkDownloadUrlFormat(url)) + self.assertEqual(errors, ["Add-on download url must end with .nvda-addon"]) def test_missingHTTPsAndExt(self): url = "http://example.com" - errors = list( - validate.checkDownloadUrlFormat(url) - ) + errors = list(validate.checkDownloadUrlFormat(url)) self.assertEqual( errors, [ "Add-on download url must start 
with https://", "Add-on download url must end with .nvda-addon", - ] + ], ) class Validate_checkSha256(unittest.TestCase): - """Tests for the checkSha256 function - """ + """Tests for the checkSha256 function""" + validSha = "e27fa778cb99f83ececeb0bc089033929eab5a2fa475ce63e68f50b03b6ab585" def test_valid(self): - errors = validate.checkSha256( - ADDON_PACKAGE, - expectedSha=self.validSha.upper() - ) + errors = validate.checkSha256(ADDON_PACKAGE, expectedSha=self.validSha.upper()) self.assertEqual(list(errors), []) - errors = validate.checkSha256( - ADDON_PACKAGE, - expectedSha=self.validSha.lower() - ) + errors = validate.checkSha256(ADDON_PACKAGE, expectedSha=self.validSha.lower()) self.assertEqual(list(errors), []) def test_invalid(self): errors = validate.checkSha256( # just do a SHA for the manifest file so we don't need to include the whole *.nvda-addon file ADDON_PACKAGE, - expectedSha='abc' + expectedSha="abc", ) errors = list(errors) - self.assertEqual( - errors, - [f"Sha256 of .nvda-addon at URL is: {self.validSha.lower()}"] - ) + self.assertEqual(errors, [f"Sha256 of .nvda-addon at URL is: {self.validSha.lower()}"]) class Validate_checkSummaryMatchesDisplayName(unittest.TestCase): @@ -142,28 +113,20 @@ def setUp(self): self.submissionData = getValidAddonSubmission() self.manifest = getAddonManifest() - def tearDown(self): - self.submissionData = None - self.manifest = None - def test_valid(self): - errors = list( - validate.checkSummaryMatchesDisplayName(self.manifest, self.submissionData) - ) + errors = list(validate.checkSummaryMatchesDisplayName(self.manifest, self.submissionData)) self.assertEqual(errors, []) def test_invalid(self): badDisplayName = "bad display Name" self.submissionData["displayName"] = badDisplayName - errors = list( - validate.checkSummaryMatchesDisplayName(self.manifest, self.submissionData) - ) + errors = list(validate.checkSummaryMatchesDisplayName(self.manifest, self.submissionData)) self.assertEqual( errors, [ f"Submission 'displayName' must be set to '{self.manifest['summary']}' in json file." - f" Instead got: '{badDisplayName}'" - ] + f" Instead got: '{badDisplayName}'", + ], ) @@ -172,81 +135,61 @@ def setUp(self): self.submissionData = getValidAddonSubmission() self.manifest = getAddonManifest() - def tearDown(self): - self.submissionData = None - self.manifest = None - def test_valid(self): - errors = list( - validate.checkDescriptionMatches(self.manifest, self.submissionData) - ) + errors = list(validate.checkDescriptionMatches(self.manifest, self.submissionData)) self.assertEqual(errors, []) def test_invalid(self): badDesc = "bad description" self.submissionData["description"] = badDesc - errors = list( - validate.checkDescriptionMatches(self.manifest, self.submissionData) - ) + errors = list(validate.checkDescriptionMatches(self.manifest, self.submissionData)) self.assertEqual( errors, [ f"Submission 'description' must be set to '{self.manifest['description']}' in json file." 
- f" Instead got: '{badDesc}'" - ] + f" Instead got: '{badDesc}'", + ], ) class Validate_checkAddonId(unittest.TestCase): """ - Manifest 'name' considered source of truth for addonID - Must match: - - Submission file name '/.json' - - `addonId` within the submission JSON data + Manifest 'name' considered source of truth for addonID + Must match: + - Submission file name '/.json' + - `addonId` within the submission JSON data """ + def setUp(self): self.submissionData = getValidAddonSubmission() self.manifest = getAddonManifest() - def tearDown(self): - self.submissionData = None - self.manifest = None - def test_valid(self): - """No error when manifest 'name', submission file path, and submission contents all agree. - """ - errors = list( - validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData) - ) + """No error when manifest 'name', submission file path, and submission contents all agree.""" + errors = list(validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData)) self.assertEqual( [ # expected errors ], - errors + errors, ) - @patch('os.path.basename', return_value="valid1-Addon_id") - def test_valid_withSymbols(self, mock_basename): - """ Error when submission does not include correct addonId format - """ - self.submissionData['addonId'] = "valid1-Addon_id" - self.manifest['name'] = "valid1-Addon_id" - errors = list( - validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData) - ) + @patch("os.path.basename", return_value="valid1-Addon_id") + def test_valid_withSymbols(self, mock_basename: NonCallableMock): + """Error when submission does not include correct addonId format""" + self.submissionData["addonId"] = "valid1-Addon_id" + self.manifest["name"] = "valid1-Addon_id" + errors = list(validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData)) self.assertEqual( [ # expected errors ], - errors + errors, ) def test_invalidPath(self): - """ Error when submission path does not include correct addon ID - """ + """Error when submission path does not include correct addon ID""" filename = os.path.join(TOP_DIR, "invalid") - errors = list( - validate.checkAddonId(self.manifest, filename, self.submissionData) - ) + errors = list(validate.checkAddonId(self.manifest, filename, self.submissionData)) self.assertEqual( [ # expected errors ( # invalidPathError @@ -254,36 +197,30 @@ def test_invalidPath(self): f" the addonId/name '{self.manifest['name']}'" ), ], - errors + errors, ) def test_invalidJSONData(self): - """ Error when submission does not include correct addonId - """ + """Error when submission does not include correct addonId""" invalidID = "invalid" - self.submissionData['addonId'] = invalidID - errors = list( - validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData) - ) + self.submissionData["addonId"] = invalidID + errors = list(validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData)) self.assertEqual( [ # expected errors ( # idMismatchError "Submission data 'addonId' field does not match 'name' field" f" in addon manifest: {VALID_ADDON_ID} vs {invalidID}" - ) + ), ], - errors + errors, ) def test_invalidJSONDataAndPath(self): - """ Error when submission does not include correct addonId and file path does not include the addonID - """ + """Error when submission does not include correct addonId and file path does not include the addonID""" expectedAddonId = "valid" - self.manifest['name'] = expectedAddonId - errors = 
list( - validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData) - ) + self.manifest["name"] = expectedAddonId + errors = list(validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData)) self.assertEqual( [ # expected errors @@ -295,47 +232,41 @@ def test_invalidJSONDataAndPath(self): f" in addon manifest: {expectedAddonId} vs {'fake'}" ), ], - errors + errors, ) - @patch('os.path.basename', return_value="invalid addon id") - def test_invalidAddonIdFormat_spaces(self, mock_basename): - """ Error when submission does not include correct addonId format - """ - self.submissionData['addonId'] = "invalid addon id" - self.manifest['name'] = "invalid addon id" - errors = list( - validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData) - ) + @patch("os.path.basename", return_value="invalid addon id") + def test_invalidAddonIdFormat_spaces(self, mock_basename: NonCallableMock): + """Error when submission does not include correct addonId format""" + self.submissionData["addonId"] = "invalid addon id" + self.manifest["name"] = "invalid addon id" + errors = list(validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData)) self.assertEqual( [ # expected errors "Submission data 'addonId' field does not match the expected format:" " must start and end with a letter, and contain only letters," " numbers, underscores, and hyphens. " - "ID: invalid addon id" + "ID: invalid addon id", ], - errors + errors, ) - @patch('os.path.basename', return_value="1invalid-addon-id") - def test_invalidAddonIdFormat_invalidStartChar(self, mock_basename): - """ Error when submission does not include correct addonId format - """ - self.submissionData['addonId'] = "1invalid-addon-id" - self.manifest['name'] = "1invalid-addon-id" - errors = list( - validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData) - ) + @patch("os.path.basename", return_value="1invalid-addon-id") + def test_invalidAddonIdFormat_invalidStartChar(self, mock_basename: NonCallableMock): + """Error when submission does not include correct addonId format""" + self.submissionData["addonId"] = "1invalid-addon-id" + self.manifest["name"] = "1invalid-addon-id" + errors = list(validate.checkAddonId(self.manifest, VALID_SUBMISSION_JSON_FILE, self.submissionData)) self.assertEqual( [ # expected errors "Submission data 'addonId' field does not match the expected format:" " must start and end with a letter, and contain only letters," " numbers, underscores, and hyphens. 
" - "ID: 1invalid-addon-id" + "ID: 1invalid-addon-id", ], - errors + errors, ) @@ -348,6 +279,7 @@ class VersionNumber: class validate_getExistingVersions(unittest.TestCase): """Tests for the getExistingVersions function.""" + def setUp(self): self.verFilename = VERSIONS_FILE @@ -356,29 +288,23 @@ def tearDown(self): def test_getExistingVersions(self): formattedVersions = list(validate.getExistingVersions(self.verFilename)) - self.assertEqual( - formattedVersions, - ["0.0.0", "2022.1.0", "2023.1.0", "2024.1.0"] - ) + self.assertEqual(formattedVersions, ["0.0.0", "2022.1.0", "2023.1.0", "2024.1.0"]) class validate_checkLastTestedVersionExists(unittest.TestCase): """Test for the checkLastTestedVersionExists function.""" + def setUp(self): self.submissionData = getValidAddonSubmission() self.verFilename = VERSIONS_FILE - def tearDown(self): - self.submissionData = None - self.verFilename = "" - def test_validOld(self): self.submissionData["lastTestedVersion"]["major"] = 0 self.submissionData["lastTestedVersion"]["minor"] = 0 self.submissionData["lastTestedVersion"]["patch"] = 0 self.assertEqual( list(validate.checkLastTestedVersionExist(self.submissionData, self.verFilename)), - [] + [], ) def test_validNew(self): @@ -387,7 +313,7 @@ def test_validNew(self): self.submissionData["lastTestedVersion"]["patch"] = 0 self.assertEqual( list(validate.checkLastTestedVersionExist(self.submissionData, self.verFilename)), - [] + [], ) def test_invalidOld(self): @@ -396,7 +322,7 @@ def test_invalidOld(self): self.submissionData["lastTestedVersion"]["patch"] = 0 self.assertEqual( list(validate.checkLastTestedVersionExist(self.submissionData, self.verFilename)), - ["Last tested version error: 2018.3.0 doesn't exist"] + ["Last tested version error: 2018.3.0 doesn't exist"], ) def test_invalidNew(self): @@ -405,7 +331,7 @@ def test_invalidNew(self): self.submissionData["lastTestedVersion"]["patch"] = 0 self.assertEqual( list(validate.checkLastTestedVersionExist(self.submissionData, self.verFilename)), - ["Last tested version error: 9999.3.0 doesn't exist"] + ["Last tested version error: 9999.3.0 doesn't exist"], ) def test_validExperimental(self): @@ -415,7 +341,7 @@ def test_validExperimental(self): self.submissionData["channel"] = "beta" self.assertEqual( list(validate.checkLastTestedVersionExist(self.submissionData, self.verFilename)), - [] + [], ) def test_invalidExperimental(self): @@ -427,28 +353,25 @@ def test_invalidExperimental(self): list(validate.checkLastTestedVersionExist(self.submissionData, self.verFilename)), [ "Last tested version error: 2024.1.0 is not stable yet. " - "Please submit add-on using the beta or dev channel." 
- ] + "Please submit add-on using the beta or dev channel.", + ], ) class validate_checkMinRequiredVersionExists(unittest.TestCase): """Test for the checkMinRequiredVersionExists function.""" + def setUp(self): self.submissionData = getValidAddonSubmission() self.verFilename = VERSIONS_FILE - def tearDown(self): - self.submissionData = None - self.verFilename = "" - def test_validOld(self): self.submissionData["minNVDAVersion"]["major"] = 0 self.submissionData["minNVDAVersion"]["minor"] = 0 self.submissionData["minNVDAVersion"]["patch"] = 0 self.assertEqual( list(validate.checkMinRequiredVersionExist(self.submissionData, self.verFilename)), - [] + [], ) def test_validNew(self): @@ -457,7 +380,7 @@ def test_validNew(self): self.submissionData["minNVDAVersion"]["patch"] = 0 self.assertEqual( list(validate.checkMinRequiredVersionExist(self.submissionData, self.verFilename)), - [] + [], ) def test_invalidOld(self): @@ -466,7 +389,7 @@ def test_invalidOld(self): self.submissionData["minNVDAVersion"]["patch"] = 0 self.assertEqual( list(validate.checkMinRequiredVersionExist(self.submissionData, self.verFilename)), - ["Minimum required version error: 2018.3.0 doesn't exist"] + ["Minimum required version error: 2018.3.0 doesn't exist"], ) def test_invalidNew(self): @@ -475,7 +398,7 @@ def test_invalidNew(self): self.submissionData["minNVDAVersion"]["patch"] = 0 self.assertEqual( list(validate.checkMinRequiredVersionExist(self.submissionData, self.verFilename)), - ["Minimum required version error: 9999.3.0 doesn't exist"] + ["Minimum required version error: 9999.3.0 doesn't exist"], ) def test_validExperimental(self): @@ -485,7 +408,7 @@ def test_validExperimental(self): self.submissionData["channel"] = "beta" self.assertEqual( list(validate.checkMinRequiredVersionExist(self.submissionData, self.verFilename)), - [] + [], ) def test_invalidExperimental(self): @@ -497,108 +420,88 @@ def test_invalidExperimental(self): list(validate.checkMinRequiredVersionExist(self.submissionData, self.verFilename)), [ "Minimum required version error: 2024.1.0 is not stable yet. " - "Please submit add-on using the beta or dev channel." - ] + "Please submit add-on using the beta or dev channel.", + ], ) class Validate_checkMinNVDAVersionMatches(unittest.TestCase): - """Tests for the checkMinNVDAVersionMatches function. - """ + """Tests for the checkMinNVDAVersionMatches function.""" + def setUp(self): self.submissionData = getValidAddonSubmission() self.manifest = getAddonManifest() - def tearDown(self): - self.submissionData = None - self.manifest = None - def test_valid(self): - errors = list( - validate.checkMinNVDAVersionMatches(self.manifest, self.submissionData) - ) + errors = list(validate.checkMinNVDAVersionMatches(self.manifest, self.submissionData)) self.assertEqual(errors, []) def test_invalid(self): self.manifest["minimumNVDAVersion"] = (1999, 1, 0) - errors = list( - validate.checkMinNVDAVersionMatches(self.manifest, self.submissionData) - ) + errors = list(validate.checkMinNVDAVersionMatches(self.manifest, self.submissionData)) self.assertEqual( errors, [ "Submission data 'minNVDAVersion' field does not match 'minNVDAVersion' field " - 'in addon manifest: 1999.1.0 vs minNVDAVersion: 2022.1.0' - ] + "in addon manifest: 1999.1.0 vs minNVDAVersion: 2022.1.0", + ], ) class Validate_checkLastTestedNVDAVersionMatches(unittest.TestCase): - """Tests for the checkLastTestedNVDAVersionMatches function. 
- """ + """Tests for the checkLastTestedNVDAVersionMatches function.""" + def setUp(self): self.submissionData = getValidAddonSubmission() self.manifest = getAddonManifest() - def tearDown(self): - self.submissionData = None - self.manifest = None - def test_valid(self): - errors = list( - validate.checkLastTestedNVDAVersionMatches(self.manifest, self.submissionData) - ) + errors = list(validate.checkLastTestedNVDAVersionMatches(self.manifest, self.submissionData)) self.assertEqual(errors, []) def test_invalid(self): self.manifest["lastTestedNVDAVersion"] = (9999, 1, 0) - errors = list( - validate.checkLastTestedNVDAVersionMatches(self.manifest, self.submissionData) - ) + errors = list(validate.checkLastTestedNVDAVersionMatches(self.manifest, self.submissionData)) self.assertEqual( errors, [ "Submission data 'lastTestedVersion' field does not match " "'lastTestedNVDAVersion' field in addon manifest: 9999.1.0 vs " - 'lastTestedVersion: 2023.1.0' - ] + "lastTestedVersion: 2023.1.0", + ], ) class Validate_checkVersions(unittest.TestCase): """Tests for the checkVersions function. - The following are considered: - - A: Submission file name '/.json' - - B: `addonVersionNumber` field within the submission JSON data - - C: `addonVersionName` field within the submission JSON data - - D: Manifest addon version name - - Constraints: - - The submission file name (A) must be a string representation of the `addonVersionNumber` field (B) - (fully qualified) eg '21.3.0.json' - - The `addonVersionName` field (C) must match the manifest version name (D) - - The `addonVersionName` field can be parsed as 2 or 3 digits, - which match the `addonVersionNumber` field (B) + The following are considered: + - A: Submission file name '/.json' + - B: `addonVersionNumber` field within the submission JSON data + - C: `addonVersionName` field within the submission JSON data + - D: Manifest addon version name + + Constraints: + - The submission file name (A) must be a string representation of the `addonVersionNumber` field (B) + (fully qualified) eg '21.3.0.json' + - The `addonVersionName` field (C) must match the manifest version name (D) + - The `addonVersionName` field can be parsed as 2 or 3 digits, + which match the `addonVersionNumber` field (B) """ + def setUp(self): self.submissionData = getValidAddonSubmission() self.manifest = getAddonManifest() self.fileName = "" - def tearDown(self): - self.submissionData = None - self.manifest = None - def _setupVersions( - self, - submissionFileNameVer: str, - versionNum: VersionNumber, - versionName: str, - manifestVersion: str + self, + submissionFileNameVer: str, + versionNum: VersionNumber, + versionName: str, + manifestVersion: str, ): - """Mutate instance variables for testing convenience - """ + """Mutate instance variables for testing convenience""" self.fileName = os.path.join(ADDON_SUBMISSIONS_DIR, VALID_ADDON_ID, f"{submissionFileNameVer}.json") self.submissionData["addonVersionNumber"]["major"] = versionNum.major self.submissionData["addonVersionNumber"]["minor"] = versionNum.minor @@ -608,24 +511,22 @@ def _setupVersions( def test_valid(self): """No error when: - - manifest version matches submission addonVersionName - - submission file name matches submission addonVersionNumber (fully qualified) - - submission addonVersionName can be parsed and matches addonVersionNumber + - manifest version matches submission addonVersionName + - submission file name matches submission addonVersionNumber (fully qualified) + - submission addonVersionName can be parsed and 
matches addonVersionNumber """ versionName = "13.6.5" self._setupVersions( submissionFileNameVer=versionName, versionNum=VersionNumber(13, 6, 5), versionName=versionName, - manifestVersion=versionName - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion=versionName, ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual([], errors) def test_fileNameMustMatchVerNum(self): - """ Error expected when fileName is not a fully qualified (trailing zero's included), + """Error expected when fileName is not a fully qualified (trailing zero's included), dot separated representation of the addonVersionNumber: eg '21.3.0.json' """ versionName = "13.06" @@ -633,19 +534,17 @@ def test_fileNameMustMatchVerNum(self): submissionFileNameVer=versionName, # expect "13.6.0" versionNum=VersionNumber(13, 6), versionName=versionName, - manifestVersion=versionName - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion=versionName, ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual( [ # expected errors - 'Submission filename and versionNumber mismatch error:' - ' addonVersionNumber: 13.6.0' - ' version from submission filename: 13.06' - ' expected submission filename: 13.6.0.json' + "Submission filename and versionNumber mismatch error:" + " addonVersionNumber: 13.6.0" + " version from submission filename: 13.06" + " expected submission filename: 13.6.0.json", ], - errors + errors, ) def test_fileNameMustUseFullyQualifiedVersion(self): @@ -657,177 +556,150 @@ def test_fileNameMustUseFullyQualifiedVersion(self): submissionFileNameVer=versionName, # expect "13.6.0" versionNum=VersionNumber(13, 6), versionName=versionName, - manifestVersion=versionName - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion=versionName, ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual( [ # expected errors - 'Submission filename and versionNumber mismatch error:' - ' addonVersionNumber: 13.6.0' - ' version from submission filename: 13.6' - ' expected submission filename: 13.6.0.json' + "Submission filename and versionNumber mismatch error:" + " addonVersionNumber: 13.6.0" + " version from submission filename: 13.6" + " expected submission filename: 13.6.0.json", ], - errors + errors, ) def test_dateBasedVersionNameValid(self): - """ Date based version in manifest is ok. Add-ons use this scheme. - """ + """Date based version in manifest is ok. Add-ons use this scheme.""" self._setupVersions( - submissionFileNameVer='13.6.0', + submissionFileNameVer="13.6.0", versionNum=VersionNumber(13, 6), versionName="13.06", - manifestVersion="13.06" - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion="13.06", ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual( [], errors, ) def test_dateBasedWithPatchVersionNameValid(self): - """ Date based version in manifest is ok. Add-ons use this scheme. - """ + """Date based version in manifest is ok. 
Add-ons use this scheme.""" self._setupVersions( - submissionFileNameVer='13.6.5', + submissionFileNameVer="13.6.5", versionNum=VersionNumber(13, 6, 5), versionName="13.06.5", - manifestVersion="13.06.5" - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) - ) - self.assertEqual( - [], - errors + manifestVersion="13.06.5", ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) + self.assertEqual([], errors) def test_unparseableVersionName(self): - """ Error when versionName include characters unable to be parsed to numeric form. + """Error when versionName include characters unable to be parsed to numeric form. These situations will need to be considered manually. """ self._setupVersions( - submissionFileNameVer='13.6.0', + submissionFileNameVer="13.6.0", versionNum=VersionNumber(13, 6), versionName="13.06-NG", - manifestVersion="13.06-NG" - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion="13.06-NG", ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual( [ # expected errors ( "Warning: submission data 'addonVersionName' and 'addonVersionNumber' " - 'mismatch. Unable to parse: 13.06-NG and match with 13.6.0' - ) + "mismatch. Unable to parse: 13.06-NG and match with 13.6.0" + ), ], - errors + errors, ) def test_nonNumericVersionName(self): - """ Error when versionName include characters unable to be parsed to numeric form. + """Error when versionName include characters unable to be parsed to numeric form. These situations will need to be considered manually. """ versionName = "June Release '21" self._setupVersions( - submissionFileNameVer='13.6.0', + submissionFileNameVer="13.6.0", versionNum=VersionNumber(13, 6), versionName=versionName, - manifestVersion=versionName - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion=versionName, ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual( [ # expected errors ( "Warning: submission data 'addonVersionName' and 'addonVersionNumber' " "mismatch. Unable to parse: June Release '21 and match with 13.6.0" - ) + ), ], - errors + errors, ) def test_versionNameMustMatchManifest(self): - """ Ensure there is no mistake with the release submission, the submission addonVersionName must match + """Ensure there is no mistake with the release submission, the submission addonVersionName must match the version field from the manifest. 
""" self._setupVersions( submissionFileNameVer="12.2.0", versionNum=VersionNumber(12, 2), versionName="12.2", - manifestVersion="13.2" - ) - errors = list( - validate.checkVersions(self.manifest, self.fileName, self.submissionData) + manifestVersion="13.2", ) + errors = list(validate.checkVersions(self.manifest, self.fileName, self.submissionData)) self.assertEqual( [ # expected errors ( "Submission data 'addonVersionName' field does not match 'version' field" " in addon manifest: 13.2 vs addonVersionName: 12.2" - ) + ), ], - errors + errors, ) class Validate_End2End(unittest.TestCase): - class OpenUrlResult: - def __init__(self, readFunc): + def __init__(self, readFunc: Callable[[], bytes]) -> None: self.read = readFunc self.code = 200 - self.headers = { - "content-length": os.path.getsize(ADDON_PACKAGE) - } + self.headers = {"content-length": os.path.getsize(ADDON_PACKAGE)} def setUp(self) -> None: - self.addonReader = open(ADDON_PACKAGE, 'rb') + self.addonReader = open(ADDON_PACKAGE, "rb") self.urlOpenResult = self.OpenUrlResult(self.addonReader.read) def tearDown(self) -> None: self.addonReader.close() - @patch('_validate.validate.urllib.request.urlopen') - def test_success(self, mock_urlopen): - """Run validate on a known good file. - """ + @patch("_validate.validate.urllib.request.urlopen") + def test_success(self, mock_urlopen: NonCallableMock): + """Run validate on a known good file.""" mock_urlopen.return_value = self.urlOpenResult - errors = list( - validate.validateSubmission(VALID_SUBMISSION_JSON_FILE, VERSIONS_FILE) - ) + errors = list(validate.validateSubmission(VALID_SUBMISSION_JSON_FILE, VERSIONS_FILE)) self.assertEqual(list(errors), []) - @patch('_validate.validate.urllib.request.urlopen') - def test_downloadFailure(self, mock_urlopen): - """Unable to download addon - """ + @patch("_validate.validate.urllib.request.urlopen") + def test_downloadFailure(self, mock_urlopen: NonCallableMock): + """Unable to download addon""" self.urlOpenResult.code = 404 # add-on not found mock_urlopen.return_value = self.urlOpenResult - errors = list( - validate.validateSubmission(VALID_SUBMISSION_JSON_FILE, VERSIONS_FILE) - ) + errors = list(validate.validateSubmission(VALID_SUBMISSION_JSON_FILE, VERSIONS_FILE)) self.assertEqual( errors, [ - 'Download of addon failed', - 'Fatal error, unable to continue: Unable to download from ' + "Download of addon failed", + "Fatal error, unable to continue: Unable to download from " # note this the mocked urlopen function actually fetches from ADDON_PACKAGE - 'https://github.com/' - 'nvaccess/dont/use/this/address/fake.nvda-addon, ' - 'HTTP response status code: 404' - ] + "https://github.com/" + "nvaccess/dont/use/this/address/fake.nvda-addon, " + "HTTP response status code: 404", + ], ) class ParseVersionString(unittest.TestCase): - def test_single(self): self.assertEqual( { @@ -835,7 +707,7 @@ def test_single(self): "minor": 0, "patch": 0, }, - validate.parseVersionStr("24") + validate.parseVersionStr("24"), ) def test_double(self): @@ -845,7 +717,7 @@ def test_double(self): "minor": 6, "patch": 0, }, - validate.parseVersionStr("24.6") + validate.parseVersionStr("24.6"), ) def test_triple(self): @@ -855,41 +727,34 @@ def test_triple(self): "minor": 6, "patch": 1, }, - validate.parseVersionStr("24.6.1") + validate.parseVersionStr("24.6.1"), ) class VersionRegex(unittest.TestCase): - def test_versionMajorMinorPatch_valid(self): ver = "23.5.1" matches = validate.VERSION_PARSE.match(ver) self.assertTrue(matches) + assert matches groups = list(x for x in 
matches.groups() if x) - self.assertEqual( - ['23', '5', '1'], - groups - ) + self.assertEqual(["23", "5", "1"], groups) def test_versionMajorMinor_valid(self): ver = "6.0" matches = validate.VERSION_PARSE.match(ver) self.assertTrue(matches) + assert matches groups = list(x for x in matches.groups() if x) - self.assertEqual( - ['6', '0'], - groups - ) + self.assertEqual(["6", "0"], groups) def test_versionMajor_valid(self): ver = "1" matches = validate.VERSION_PARSE.match(ver) self.assertTrue(matches) + assert matches groups = list(x for x in matches.groups() if x) - self.assertEqual( - ['1'], - groups - ) + self.assertEqual(["1"], groups) def test_NonDotSep_invalid(self): ver = f"{3},{2},{1}" diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 27c8afc..0000000 --- a/tox.ini +++ /dev/null @@ -1,52 +0,0 @@ -# tox (https://tox.readthedocs.io/) is a tool for running tests -# in multiple virtualenvs. This configuration file will run the -# test suite on all supported python versions. To use it, "pip install tox" -# and then run "tox" from this directory. - -[tox] -skipsdist=True -envlist = py - -[testenv] -deps = -r requirements.txt - -commands = - python -m unittest discover - flake8 - -[flake8] -# Plugins -use-flake8-tabs = True -# Not all checks are replaced by flake8-tabs, however, pycodestyle is still not compatible with tabs. -continuation-style = hanging -## The following are replaced by flake8-tabs plugin, reported as ET codes rather than E codes. -# E121, E122, E123, E126, E127, E128, -## The following (all disabled) are not replaced by flake8-tabs, -# E124 - Requires mixing spaces and tabs: Closing bracket does not match visual indentation. -# E125 - Does not take tabs into consideration: Continuation line with same indent as next logical line. -# E129 - Requires mixing spaces and tabs: Visually indented line with same indent as next logical line -# E131 - Requires mixing spaces and tabs: Continuation line unaligned for hanging indent -# E133 - Our preference handled by ET126: Closing bracket is missing indentation - - -# Reporting -statistics = True -doctests = True -show-source = True - -# Options -max-complexity = 15 -max-line-length = 110 -# Final bracket should match indentation of the start of the line of the opening bracket -hang-closing = False - -ignore = - # indentation contains tabs - W191, - # line break before binary operator. We want W504(line break after binary operator) - W503, - -filename = - *.py - -extend-exclude = venv,.venv diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..adcb508 --- /dev/null +++ b/uv.lock @@ -0,0 +1,258 @@ +version = 1 +revision = 3 +requires-python = "==3.13.*" +resolution-markers = [ + "sys_platform == 'win32'", +] +supported-markers = [ + "sys_platform == 'win32'", +] + +[[package]] +name = "addon-datastore-validation" +source = { editable = "." 
} +dependencies = [ + { name = "configobj", marker = "sys_platform == 'win32'" }, + { name = "jsonschema", marker = "sys_platform == 'win32'" }, +] + +[package.dev-dependencies] +lint = [ + { name = "pre-commit", marker = "sys_platform == 'win32'" }, + { name = "pyright", marker = "sys_platform == 'win32'" }, + { name = "ruff", marker = "sys_platform == 'win32'" }, +] +unit-tests = [ + { name = "unittest-xml-reporting", marker = "sys_platform == 'win32'" }, +] + +[package.metadata] +requires-dist = [ + { name = "configobj", git = "https://github.com/DiffSK/configobj?rev=8be54629ee7c26acb5c865b74c76284e80f3aa31" }, + { name = "jsonschema", specifier = "==4.25.1" }, +] + +[package.metadata.requires-dev] +lint = [ + { name = "pre-commit", specifier = "==4.3.0" }, + { name = "pyright", specifier = "==1.1.405" }, + { name = "ruff", specifier = "==0.13.0" }, +] +unit-tests = [{ name = "unittest-xml-reporting", specifier = "==3.2.0" }] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "configobj" +version = "5.1.0.dev0" +source = { git = "https://github.com/DiffSK/configobj?rev=8be54629ee7c26acb5c865b74c76284e80f3aa31#8be54629ee7c26acb5c865b74c76284e80f3aa31" } + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "filelock" +version = "3.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = 
"sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, +] + +[[package]] +name = "identify" +version = "2.6.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs", marker = "sys_platform == 'win32'" }, + { name = "jsonschema-specifications", marker = "sys_platform == 'win32'" }, + { name = "referencing", marker = "sys_platform == 'win32'" }, + { name = "rpds-py", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "lxml" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/bd/f9d01fd4132d81c6f43ab01983caea69ec9614b913c290a26738431a015d/lxml-6.0.1.tar.gz", hash = "sha256:2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690", size = 4070214, upload-time = "2025-08-22T10:37:53.525Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/43/c1cb2a7c67226266c463ef8a53b82d42607228beb763b5fbf4867e88a21f/lxml-6.0.1-cp313-cp313-win32.whl", hash = 
"sha256:01dab65641201e00c69338c9c2b8a0f2f484b6b3a22d10779bb417599fae32b5", size = 3610051, upload-time = "2025-08-22T10:34:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/34/96/6a6c3b8aa480639c1a0b9b6faf2a63fb73ab79ffcd2a91cf28745faa22de/lxml-6.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:bdf8f7c8502552d7bff9e4c98971910a0a59f60f88b5048f608d0a1a75e94d1c", size = 4009325, upload-time = "2025-08-22T10:34:06.24Z" }, + { url = "https://files.pythonhosted.org/packages/8c/66/622e8515121e1fd773e3738dae71b8df14b12006d9fb554ce90886689fd0/lxml-6.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a6aeca75959426b9fd8d4782c28723ba224fe07cfa9f26a141004210528dcbe2", size = 3670443, upload-time = "2025-08-22T10:34:07.974Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv", marker = "sys_platform == 'win32'" }, + { name = "identify", marker = "sys_platform == 'win32'" }, + { name = "nodeenv", marker = "sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'win32'" }, + { name = "virtualenv", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.405" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv", marker = "sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs", marker = "sys_platform == 'win32'" }, + { name = "rpds-py", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, +] + +[[package]] +name = "ruff" +version = "0.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" }, + { url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "unittest-xml-reporting" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "lxml", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/40/3bf1afc96e93c7322520981ac4593cbb29daa21b48d32746f05ab5563dca/unittest-xml-reporting-3.2.0.tar.gz", hash = "sha256:edd8d3170b40c3a81b8cf910f46c6a304ae2847ec01036d02e9c0f9b85762d28", size = 18002, upload-time = "2022-01-20T19:09:55.76Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/39/88/f6e9b87428584a3c62cac768185c438ca6d561367a5d267b293259d76075/unittest_xml_reporting-3.2.0-py2.py3-none-any.whl", hash = "sha256:f3d7402e5b3ac72a5ee3149278339db1a8f932ee405f48bcb9c681372f2717d5", size = 20936, upload-time = "2022-01-20T19:09:53.824Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib", marker = "sys_platform == 'win32'" }, + { name = "filelock", marker = "sys_platform == 'win32'" }, + { name = "platformdirs", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, +] diff --git a/venvUtils/ensureAndActivate.ps1 b/venvUtils/ensureAndActivate.ps1 deleted file mode 100644 index 3385e5a..0000000 --- a/venvUtils/ensureAndActivate.ps1 +++ /dev/null @@ -1,8 +0,0 @@ -# this script ensures the NVDA build system Python virtual environment is created and up to date, -# and then activates it. -# this script should be used only in the case where many commands will be executed within the environment and the shell will be eventually thrown away. -# E.g. an Appveyor build. -py -3.13-64 "$PSScriptRoot\ensureVenv.py" -if ($LASTEXITCODE -eq 1) {exit 1} -. "$PSScriptRoot\..\.venv\scripts\activate.ps1" -Set-Variable NVDA_VENV $ENV:VIRTUAL_ENV diff --git a/venvUtils/ensureVenv.py b/venvUtils/ensureVenv.py deleted file mode 100644 index 71b7740..0000000 --- a/venvUtils/ensureVenv.py +++ /dev/null @@ -1,138 +0,0 @@ -# A part of NonVisual Desktop Access (NVDA) -# Copyright (C) 2021 NV Access Limited -# This file may be used under the terms of the GNU General Public License, version 2 or later. -# For more details see: https://www.gnu.org/licenses/gpl-2.0.html - - -import sys -import os -import subprocess -import shutil -from typing import Set - -""" -A script to ensure that the NVDA build system's Python virtual environment is created and up to date. -""" - -top_dir: str = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..") -venv_path: str = os.path.join(top_dir, ".venv") -requirements_path: str = os.path.join(top_dir, "requirements.txt") -venv_orig_requirements_path: str = os.path.join(venv_path, "_requirements.txt") -venv_python_version_path: str = os.path.join(venv_path, "python_version") - - -def askYesNoQuestion(message: str) -> bool: - """ - Displays the given message to the user and accepts y or n as input. - Any other input causes the question to be asked again. - @returns: True for y and n for False. - """ - while True: - answer = input( - message + " [y/n]: " - ) - if answer == 'n': - return False - elif answer == 'y': - return True - else: - continue # ask again - - -def fetchRequirementsSet(path: str) -> Set[str]: - """ - Fetches all the package lines from a pip requirements.txt file - returning them as a set of strings. 
- The returned set could be compared with a set from another file - which would allow easy identification of which requirements were added or removed. - """ - with open(path, "r") as f: - lines = [x.strip() for x in f.readlines()] - lines = [x for x in lines if x and not x.isspace() and not x.startswith('#')] - return set(lines) - - -def createVenvAndPopulate(): - """ - Creates the NVDA build system's Python virtual environment and installs all required packages. - this function will overwrite any existing virtual environment found at c{venv_path}. - """ - print("Creating virtual environment...", flush=True) - subprocess.run( - [ - sys.executable, - "-m", "venv", - "--clear", - venv_path, - ], - check=True - ) - with open(venv_python_version_path, "w") as f: - f.write(sys.version) - print("Installing packages in virtual environment...", flush=True) - subprocess.run( - [ - # Activate virtual environment - os.path.join(venv_path, "scripts", "activate.bat"), - "&&", - # Ensure we have the latest version of pip - "py", "-m", "pip", - "install", "--upgrade", "pip", - "&&", - # Install required packages with pip - "py", "-m", "pip", - "install", "-r", requirements_path, - ], - check=True, - shell=True, - ) - shutil.copy(requirements_path, venv_orig_requirements_path) - - -def ensureVenvAndRequirements(): - """ - Ensures that the NVDA build system's Python virtual environment is created and up to date. - If a previous virtual environment exists but has a miss-matching Python version - or pip package requirements have changed, - The virtual environment is recreated with the updated version of Python and packages. - If a virtual environment is found but does not seem to be ours, - This function asks the user if it should be overwritten or not. - """ - if not os.path.exists(venv_path): - print("Virtual environment does not exist.") - return createVenvAndPopulate() - if ( - not os.path.exists(venv_python_version_path) - or not os.path.exists(venv_orig_requirements_path) - ): - if askYesNoQuestion( - f"Virtual environment at {venv_path} probably not created by NVDA. " - "This directory must be removed before continuing. Should it be removed?" - ): - return createVenvAndPopulate() - else: - print("Aborting") - sys.exit(1) - venv_python_version = open(venv_python_version_path, "r").read() - if venv_python_version != sys.version: - print(f"Python version changed. Was {venv_python_version}, now is {sys.version}") - return createVenvAndPopulate() - oldRequirements = fetchRequirementsSet(venv_orig_requirements_path) - newRequirements = fetchRequirementsSet(requirements_path) - addedRequirements = newRequirements - oldRequirements - if addedRequirements: - print(f"Added or changed package requirements. {addedRequirements}") - return createVenvAndPopulate() - - -if __name__ == '__main__': - # Ensure we are not inside an already active Python virtual environment. - print(f"Python version {sys.version}") - virtualEnv = os.getenv("VIRTUAL_ENV") - if virtualEnv: - print( - "Error: It looks like another Python virtual environment is already active in this shell.\n" - "Please deactivate the current Python virtual environment and try again." - ) - sys.exit(1) - ensureVenvAndRequirements() diff --git a/venvUtils/venvCmd.ps1 b/venvUtils/venvCmd.ps1 deleted file mode 100644 index c905d2b..0000000 --- a/venvUtils/venvCmd.ps1 +++ /dev/null @@ -1,26 +0,0 @@ -# this script executes the single given command and arguments inside the NVDA build system's Python virtual environment. 
-# It activates the environment, creating / updating it first if necessary, -# then executes the command, -# and then finally deactivates the environment. - -# This script also supports running in an already fully activated NVDA Python environment. -# If this is detected, the command is executed directly instead. - -if ("$ENV:VIRTUAL_ENV" -ne "") { - if ("$NVDA_VENV" -ne "$ENV:VIRTUAL_ENV") { - Write-Output "Warning: Detected a custom Python virtual environment. " - Write-Output "It is recommended to run all NVDA build system commands outside of any existing Python virtual environment, unless you really know what you are doing." - } - Write-Output "directly calling $($args[0]) $($args[1])" - . "$ENV:VIRTUAL_ENV/Scripts/python.exe" @args - exit $LASTEXITCODE -} - - -Write-Output "Ensuring NVDA Python virtual environment" -. $PSScriptRoot/ensureAndActivate.ps1 -if ($LASTEXITCODE -eq 1) {exit 1} -Write-Output "calling $($args[0]) $($args[1])" -. "$ENV:VIRTUAL_ENV/Scripts/python.exe" @args -Write-Output "Deactivating NVDA Python virtual environment" -deactivate
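The renamed test modules above pin down the version-parsing behaviour fairly precisely: `MajorMinorPatch.getFromStr` accepts two- or three-part dotted versions, treats a missing patch component as zero, and rejects extra components or non-digit characters, while `validate.parseVersionStr` returns the same information as a plain dict. The following is a minimal sketch of helpers that would satisfy those assertions; it is inferred from the tests only and is not the repository's actual `_validate` implementation.

# Illustrative sketch only: inferred from tests/test_majorMinorPatch.py and the
# ParseVersionString cases in tests/test_validate.py, not taken from _validate itself.
import re
from typing import NamedTuple


class MajorMinorPatch(NamedTuple):
	major: int
	minor: int
	patch: int = 0

	@classmethod
	def getFromStr(cls, version: str) -> "MajorMinorPatch":
		# "1.2.3" -> (1, 2, 3); "1.02" -> (1, 2, 0); "1.2.3.4" and "1.2.3a" raise.
		parts = version.split(".")
		if not 2 <= len(parts) <= 3:
			raise ValueError(f"Expected 2 or 3 version components, got: {version!r}")
		if not all(p.isdigit() for p in parts):
			raise ValueError(f"Non-digit characters in version number: {version!r}")
		return cls(*(int(p) for p in parts))


def parseVersionStr(version: str) -> dict[str, int]:
	"""Return a {major, minor, patch} dict, defaulting missing components to zero."""
	# "24" -> 24.0.0, "24.6" -> 24.6.0, "24.6.1" -> 24.6.1 (per the ParseVersionString tests).
	major, minor, patch = (version.split(".") + ["0", "0"])[:3]
	return {"major": int(major), "minor": int(minor), "patch": int(patch)}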
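Several of the validation helpers exercised above share one shape: they yield human-readable error strings and yield nothing when the input is acceptable, so the tests can simply compare `list(errors)` against an expected list. A rough sketch of that pattern is below; the exact messages mirror the assertions in tests/test_validate.py, but the function bodies and the `checkAddonIdFormat` helper name are assumptions rather than the code in `_validate/validate.py`.

# Illustrative sketch of the generator-of-errors pattern the validate tests rely on.
# Messages are copied from the test expectations; the implementations are assumptions.
import re
from collections.abc import Generator

# Per the addonId format tests: must start and end with a letter, and otherwise
# contain only letters, digits, underscores and hyphens.
ADDON_ID_RE = re.compile(r"^[a-zA-Z][a-zA-Z0-9_-]*[a-zA-Z]$")


def checkDownloadUrlFormat(url: str) -> Generator[str, None, None]:
	if not url.startswith("https://"):
		yield "Add-on download url must start with https://"
	if not url.endswith(".nvda-addon"):
		yield "Add-on download url must end with .nvda-addon"


def checkAddonIdFormat(addonId: str) -> Generator[str, None, None]:
	# Hypothetical helper name; in the real module this check may live inside checkAddonId.
	if not ADDON_ID_RE.match(addonId):
		yield (
			"Submission data 'addonId' field does not match the expected format:"
			" must start and end with a letter, and contain only letters,"
			" numbers, underscores, and hyphens. "
			f"ID: {addonId}"
		)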