diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 000000000..e47301f83 --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,54 @@ +--- + +codecov: + notify: + after_n_builds: 21 # Number of test matrix+lint jobs uploading coverage + wait_for_ci: false + + require_ci_to_pass: false + + token: >- # notsecret # repo-scoped, upload-only, stability in fork PRs + 7316089b-55fe-4646-b640-78d84b79d109 + +comment: + require_changes: true + +coverage: + range: 100..100 + status: + patch: + default: + target: 100% + pytest: + target: 100% + flags: + - pytest + typing: + flags: + - MyPy + project: + default: + target: 95% + lib: + flags: + - pytest + paths: + - src/ + target: 100% + tests: + flags: + - pytest + paths: + - tests/ + target: 100% + typing: + flags: + - MyPy + target: 90% + +github_checks: + # Annotations are deprecated in Codecov because they are misleading. + # Ref: https://github.com/codecov/codecov-action/issues/1710 + annotations: false + +... diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 000000000..ffab40193 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,34 @@ +[html] +show_contexts = true +skip_covered = false + +[paths] +_site-packages-to-src-mapping = + src + */src + *\src + */lib/pypy*/site-packages + */lib/python*/site-packages + *\Lib\site-packages + +[report] +skip_covered = true +skip_empty = true +show_missing = true +exclude_also = + ^\s*@pytest\.mark\.xfail + ^\s*\.\.\.\s*(#.*)?$ + +[run] +branch = true +cover_pylib = false +# https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts +# dynamic_context = test_function # conflicts with `pytest-cov` if set here +parallel = true +plugins = + covdefaults +relative_files = true +source = + . +source_pkgs = + pre_commit_terraform diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml new file mode 100644 index 000000000..f5c0ca61d --- /dev/null +++ b/.github/workflows/ci-cd.yml @@ -0,0 +1,506 @@ +--- + +name: ๐Ÿงช + +on: + merge_group: + push: + branches-ignore: + - dependabot/** # Dependabot always creates PRs + - gh-readonly-queue/** # Temporary merge queue-related GH-made branches + - maintenance/pip-tools-constraint-lockfiles # Lock files through PRs + - maintenance/pip-tools-constraint-lockfiles-** # Lock files through PRs + - patchback/backports/** # Patchback always creates PRs + - pre-commit-ci-update-config # pre-commit.ci always creates a PR + pull_request: + workflow_call: # a way to embed the main tests + +concurrency: + group: >- + ${{ + github.workflow + }}-${{ + github.ref_type + }}-${{ + github.event.pull_request.number || github.sha + }} + cancel-in-progress: true + +env: + FORCE_COLOR: 1 # Request colored output from CLI tools supporting it + MYPY_FORCE_COLOR: 1 # MyPy's color enforcement + PIP_DISABLE_PIP_VERSION_CHECK: 1 # Hide "there's a newer pip" message + PIP_NO_PYTHON_VERSION_WARNING: 1 # Hide "this Python is deprecated" message + PIP_NO_WARN_SCRIPT_LOCATION: 1 # Hide "script dir is not in $PATH" message + PRE_COMMIT_COLOR: always + PROJECT_NAME: pre-commit-terraform + PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` + PYTHONIOENCODING: utf-8 + PYTHONUTF8: 1 + TOX_PARALLEL_NO_SPINNER: 1 # Disable tox's parallel run spinner animation + TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests + FORCE_COLOR + MYPY_FORCE_COLOR + NO_COLOR + PIP_DISABLE_PIP_VERSION_CHECK + PIP_NO_PYTHON_VERSION_WARNING + PIP_NO_WARN_SCRIPT_LOCATION + PRE_COMMIT_COLOR + PY_COLORS + PYTEST_THEME + PYTEST_THEME_MODE + PYTHONIOENCODING + 
PYTHONLEGACYWINDOWSSTDIO + PYTHONUTF8 + UPSTREAM_REPOSITORY_ID: >- + 69382485 + +run-name: >- + ${{ + github.event_name == 'workflow_dispatch' + && format('๐Ÿ“ฆ Releasing v{0}...', github.event.inputs.release-version) + || '' + }} + ${{ + github.event.pull_request.number && '๐Ÿ”€ PR' || '' + }}${{ + !github.event.pull_request.number && '๐ŸŒฑ Commit' || '' + }} + ${{ github.event.pull_request.number || github.sha }} + triggered by: ${{ github.event_name }} of ${{ + github.ref + }} ${{ + github.ref_type + }} + (workflow run ID: ${{ + github.run_id + }}; number: ${{ + github.run_number + }}; attempt: ${{ + github.run_attempt + }}) + +jobs: + pre-setup: + name: โš™๏ธ Pre-set global build settings + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + defaults: + run: + shell: python + + outputs: + # NOTE: These aren't env vars because the `${{ env }}` context is + # NOTE: inaccessible when passing inputs to reusable workflows. + dists-artifact-name: python-package-distributions + dist-version: ${{ steps.scm-version.outputs.dist-version }} + cache-key-files: >- + ${{ steps.calc-cache-key-files.outputs.files-hash-key }} + git-tag: ${{ steps.git-tag.outputs.tag }} + sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }} + wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }} + upstream-repository-id: ${{ env.UPSTREAM_REPOSITORY_ID }} + + steps: + - name: Switch to using Python 3.13 by default + uses: actions/setup-python@v5 + with: + python-version: 3.13 + - name: Check out src from Git + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: >- + Calculate Python interpreter version hash value + for use in the cache key + id: calc-cache-key-py + run: | + from hashlib import sha512 + from os import environ + from pathlib import Path + from sys import version + + FILE_APPEND_MODE = 'a' + + hash = sha512(version.encode()).hexdigest() + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'py-hash-key={hash}', file=outputs_file) + - name: >- + Calculate dependency files' combined hash value + for use in the cache key + id: calc-cache-key-files + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + "files-hash-key=${{ + hashFiles( + 'tox.ini', + 'pyproject.toml', + '.pre-commit-config.yaml', + 'pytest.ini', + 'dependencies/**/*' + ) + }}", + file=outputs_file, + ) + - name: Get pip cache dir + id: pip-cache-dir + run: >- + echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}" + shell: bash + - name: Set up pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache-dir.outputs.dir }} + key: >- + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key }}-${{ + steps.calc-cache-key-files.outputs.files-hash-key }} + restore-keys: | + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key + }}- + ${{ runner.os }}-pip- + ${{ runner.os }}- + - name: Drop Git tags from HEAD for non-release requests + run: >- + git tag --points-at HEAD + | + xargs git tag --delete + shell: bash + - name: Set up versioning prerequisites + run: >- + python -m + pip install + --user + setuptools-scm + shell: bash + - name: Set the current dist version from Git + id: scm-version + run: | + from os import environ + from pathlib import Path + + import setuptools_scm + + FILE_APPEND_MODE = 'a' + + ver = setuptools_scm.get_version() + with Path(environ['GITHUB_OUTPUT']).open( + 
mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'dist-version={ver}', file=outputs_file) + print( + f'dist-version-for-filenames={ver.replace("+", "-")}', + file=outputs_file, + ) + - name: Set the target Git tag + id: git-tag + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + "tag=v${{ + steps.scm-version.outputs.dist-version + }}", + file=outputs_file, + ) + - name: Set the expected dist artifact names + id: artifact-name + run: | + from os import environ + from pathlib import Path + + FILE_APPEND_MODE = 'a' + + whl_file_prj_base_name = '${{ env.PROJECT_NAME }}'.replace('-', '_') + sdist_file_prj_base_name = whl_file_prj_base_name.replace('.', '_') + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + f"sdist={sdist_file_prj_base_name !s}-${{ + steps.scm-version.outputs.dist-version + }}.tar.gz", + file=outputs_file, + ) + print( + f"wheel={whl_file_prj_base_name !s}-${{ + steps.scm-version.outputs.dist-version + }}-py3-none-any.whl", + file=outputs_file, + ) + + build: + name: ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} + needs: + - pre-setup + + runs-on: ubuntu-latest + + timeout-minutes: 2 + + env: + TOXENV: cleanup-dists,build-dists + + outputs: + dists-base64-hash: ${{ steps.dist-hashes.outputs.combined-hash }} + + steps: + - name: Switch to using Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: 3.13 + + - name: Grab the source from Git + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: >- + Calculate Python interpreter version hash value + for use in the cache key + id: calc-cache-key-py + run: | + from hashlib import sha512 + from os import environ + from pathlib import Path + from sys import version + + FILE_APPEND_MODE = 'a' + + hash = sha512(version.encode()).hexdigest() + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'py-hash-key={hash}', file=outputs_file) + shell: python + - name: Get pip cache dir + id: pip-cache-dir + run: >- + echo "dir=$(python -m pip cache dir)" >> "${GITHUB_OUTPUT}" + - name: Set up pip cache + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache-dir.outputs.dir }} + key: >- + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key }}-${{ + needs.pre-setup.outputs.cache-key-files }} + restore-keys: | + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key + }}- + ${{ runner.os }}-pip- + + - name: Install tox + run: >- + python -Im pip install tox + shell: bash # windows compat + + - name: Pre-populate the tox env + run: >- + python -m + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + --notest + + - name: Set static timestamp for dist build reproducibility + # ... 
from the last Git commit since it's immutable + run: >- + echo "SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)" + >> "${GITHUB_ENV}" + - name: Build dists + run: >- + python -m + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + --skip-pkg-install + --quiet + - name: Verify that the artifacts with expected names got created + run: >- + ls -1 + 'dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}' + 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}' + - name: Generate dist hashes to be used for provenance + id: dist-hashes + run: >- + echo "combined-hash=$( + sha256sum + '${{ needs.pre-setup.outputs.sdist-artifact-name }}' + '${{ needs.pre-setup.outputs.wheel-artifact-name }}' + | base64 -w0 + )" + >> "${GITHUB_OUTPUT}" + working-directory: dist + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + # NOTE: Exact expected file names are specified here + # NOTE: as a safety measure โ€” if anything weird ends + # NOTE: up being in this dir or not all dists will be + # NOTE: produced, this will fail the workflow. + path: | + dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} + dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} + retention-days: 30 + + lint: + name: ๐Ÿงน Linters${{ '' }} # nest jobs under the same sidebar category + needs: + - build + - pre-setup # transitive, for accessing settings + strategy: + matrix: + runner-vm-os: + - ubuntu-latest + python-version: + - 3.13 + toxenv: + - pre-commit + - metadata-validation + environment-variables: + - >- # only affects pre-commit, set for all for simplicity: + SKIP=hadolint,shfmt + tox-run-posargs: + - '' + xfail: + - false + check-name: + - '' + fail-fast: false + uses: ./.github/workflows/reusable-tox.yml + with: + cache-key-files: >- + ${{ needs.pre-setup.outputs.cache-key-files }} + check-name: >- + ${{ matrix.check-name }} + dists-artifact-name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + environment-variables: >- + ${{ matrix.environment-variables }} + python-version: >- + ${{ matrix.python-version }} + runner-vm-os: >- + ${{ matrix.runner-vm-os }} + source-tarball-name: >- + ${{ needs.pre-setup.outputs.sdist-artifact-name }} + timeout-minutes: 3 + toxenv: >- + ${{ matrix.toxenv }} + tox-run-posargs: >- + ${{ matrix.tox-run-posargs }} + upstream-repository-id: >- + ${{ needs.pre-setup.outputs.upstream-repository-id }} + xfail: ${{ fromJSON(matrix.xfail) }} + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + + tests: + name: ๐Ÿงช Tests${{ '' }} # nest jobs under the same sidebar category + needs: + - build + - pre-setup # transitive, for accessing settings + strategy: + matrix: + python-version: + # NOTE: The latest and the lowest supported Pythons are prioritized + # NOTE: to improve the responsiveness. It's nice to see the most + # NOTE: important results first. 
+ - 3.13 + - 3.9 + - >- # str + 3.10 + - 3.12 + - 3.11 + runner-vm-os: + - ubuntu-24.04 + - macos-14 + - macos-13 + - windows-2025 + toxenv: + - py + xfail: + - false + + uses: ./.github/workflows/reusable-tox.yml + with: + built-wheel-names: >- + ${{ needs.pre-setup.outputs.wheel-artifact-name }} + cache-key-files: >- + ${{ needs.pre-setup.outputs.cache-key-files }} + dists-artifact-name: >- + ${{ needs.pre-setup.outputs.dists-artifact-name }} + python-version: >- + ${{ matrix.python-version }} + runner-vm-os: >- + ${{ matrix.runner-vm-os }} + source-tarball-name: >- + ${{ needs.pre-setup.outputs.sdist-artifact-name }} + timeout-minutes: 5 + toxenv: >- + ${{ matrix.toxenv }} + tox-run-posargs: >- + --cov-report=xml:.tox/.tmp/.test-results/pytest-${{ + matrix.python-version + }}/cobertura.xml + --junitxml=.tox/.tmp/.test-results/pytest-${{ + matrix.python-version + }}/test.xml + tox-rerun-posargs: >- + -rA + -vvvvv + --lf + --no-cov + --no-fold-skipped + upstream-repository-id: >- + ${{ needs.pre-setup.outputs.upstream-repository-id }} + xfail: ${{ fromJSON(matrix.xfail) }} + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - lint + - tests + + runs-on: ubuntu-latest + + timeout-minutes: 1 + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} + +... diff --git a/.github/workflows/reusable-tox.yml b/.github/workflows/reusable-tox.yml new file mode 100644 index 000000000..b540075e4 --- /dev/null +++ b/.github/workflows/reusable-tox.yml @@ -0,0 +1,418 @@ +--- + +name: >- + โŒ + [DO NOT CLICK] + Reusable Tox + +on: + workflow_call: + inputs: + built-wheel-names: + description: >- + A glob for the built distributions in the artifact + to test (is installed into tox env if passed) + required: false + type: string + cache-key-files: + description: Dependency files cache + required: true + type: string + check-name: + description: A custom name for the Checks API-reported status + required: false + type: string + dists-artifact-name: + description: Workflow artifact name containing dists + required: true + type: string + environment-variables: + description: >- + A newline-delimited blob of text with environment variables + to be set using `${GITHUB_ENV}` + required: false + type: string + python-version: + description: Python version to provision in the VM + required: true + type: string + release-requested: + description: Flag whether this is CI run is a release request + default: 'false' + required: false + type: string + runner-vm-os: + description: VM OS to use + default: ubuntu + required: false + type: string + source-tarball-name: + description: Sdist filename wildcard + required: true + type: string + timeout-minutes: + description: Deadline for the job to complete + required: true + type: string + toxenv: + description: Name of the tox environment to use + required: true + type: string + tox-run-posargs: + description: Positional arguments to pass to the regular tox run + required: false + type: string + tox-rerun-posargs: + description: Positional arguments to pass to the re-attempted tox run + required: false + type: string + upstream-repository-id: + description: ID of the upstream GitHub Repository + required: true + type: string + xfail: + description: >- + Whether this job is expected to fail. Controls if the run outcomes + contribute to the failing CI status or not. 
The job status will be + treated as successful if this is set to `true`. Setting `false` + should be preferred typically. + required: true + type: string + secrets: + codecov-token: + description: Mandatory token for uploading to Codecov + required: true + +env: + COLOR: >- # Supposedly, pytest or coveragepy use this + yes + FORCE_COLOR: 1 # Request colored output from CLI tools supporting it + MYPY_FORCE_COLOR: 1 # MyPy's color enforcement + PIP_DISABLE_PIP_VERSION_CHECK: 1 + PIP_NO_PYTHON_VERSION_WARNING: 1 + PIP_NO_WARN_SCRIPT_LOCATION: 1 + PRE_COMMIT_COLOR: always + PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` + PYTHONIOENCODING: utf-8 + PYTHONUTF8: 1 + TOX_PARALLEL_NO_SPINNER: 1 + TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests + COLOR + FORCE_COLOR + MYPY_FORCE_COLOR + NO_COLOR + PIP_DISABLE_PIP_VERSION_CHECK + PIP_NO_PYTHON_VERSION_WARNING + PIP_NO_WARN_SCRIPT_LOCATION + PRE_COMMIT_COLOR + PY_COLORS + PYTEST_THEME + PYTEST_THEME_MODE + PYTHONIOENCODING + PYTHONLEGACYWINDOWSSTDIO + PYTHONUTF8 + +jobs: + tox: + name: >- + ${{ + inputs.check-name + && inputs.check-name + || format( + '{0}@๐Ÿ{1}@{2}', + inputs.toxenv, + inputs.python-version, + inputs.runner-vm-os + ) + }} + + runs-on: ${{ inputs.runner-vm-os }} + + timeout-minutes: ${{ fromJSON(inputs.timeout-minutes) }} + + continue-on-error: >- + ${{ + ( + fromJSON(inputs.xfail) || + ( + startsWith(inputs.python-version, '~') + ) || + contains(inputs.python-version, 'alpha') + ) && true || false + }} + + env: + TOXENV: ${{ inputs.toxenv }} + + steps: + - name: Export requested job-global environment variables + if: inputs.environment-variables != '' + run: >- + echo '${{ inputs.environment-variables }}' + >> "${GITHUB_ENV}" + + - name: >- + Switch to using Python v${{ inputs.python-version }} + by default + id: python-install + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + + # NOTE: `pre-commit --show-diff-on-failure` and `sphinxcontrib-spellcheck` + # NOTE: with Git authors allowlist enabled both depend on the presence of a + # NOTE: Git repository. + - name: Grab the source from Git + if: >- + contains(fromJSON('["pre-commit", "spellcheck-docs"]'), inputs.toxenv) + uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.release-committish }} + - name: Retrieve the project source from an sdist inside the GHA artifact + if: >- + !contains(fromJSON('["pre-commit", "spellcheck-docs"]'), inputs.toxenv) + uses: re-actors/checkout-python-sdist@release/v2 + with: + source-tarball-name: ${{ inputs.source-tarball-name }} + workflow-artifact-name: ${{ inputs.dists-artifact-name }} + + - name: Cache pre-commit.com virtualenvs + if: inputs.toxenv == 'pre-commit' + uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit + key: >- + ${{ + runner.os + }}-pre-commit-${{ + hashFiles('.pre-commit-config.yaml') + }} + + - name: Figure out if the interpreter ABI is stable + id: py-abi + run: | + from os import environ + from pathlib import Path + from sys import version_info + + FILE_APPEND_MODE = 'a' + + is_stable_abi = version_info.releaselevel == 'final' + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print( + 'is-stable-abi={is_stable_abi}'. 
+ format(is_stable_abi=str(is_stable_abi).lower()), + file=outputs_file, + ) + shell: python + - name: >- + Calculate Python interpreter version hash value + for use in the cache key + if: fromJSON(steps.py-abi.outputs.is-stable-abi) + id: calc-cache-key-py + run: | + from hashlib import sha512 + from os import environ + from pathlib import Path + from sys import version + + FILE_APPEND_MODE = 'a' + + hash = sha512(version.encode()).hexdigest() + + with Path(environ['GITHUB_OUTPUT']).open( + mode=FILE_APPEND_MODE, + ) as outputs_file: + print(f'py-hash-key={hash}', file=outputs_file) + shell: python + - name: Get pip cache dir + if: fromJSON(steps.py-abi.outputs.is-stable-abi) + id: pip-cache-dir + run: >- + echo "dir=$(python -Im pip cache dir)" >> "${GITHUB_OUTPUT}" + shell: bash + - name: Set up pip cache + if: fromJSON(steps.py-abi.outputs.is-stable-abi) + uses: actions/cache@v4 + with: + path: ${{ steps.pip-cache-dir.outputs.dir }} + key: >- + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key }}-${{ + inputs.cache-key-files }} + restore-keys: | + ${{ runner.os }}-pip-${{ + steps.calc-cache-key-py.outputs.py-hash-key + }}- + ${{ runner.os }}-pip- + + - name: Install tox + run: >- + python -Im pip install tox + shell: bash # windows compat + + - name: Make the env clean of non-test files + if: inputs.toxenv == 'metadata-validation' + run: | + shopt -s extglob + rm -rf !tox.ini + shell: bash + - name: Download all the dists + if: >- + contains(fromJSON('["metadata-validation", "py"]'), inputs.toxenv) + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.dists-artifact-name }} + path: dist/ + + - name: >- + Pre-populate tox envs: `${{ env.TOXENV }}` + run: >- + python -Im + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + ${{ + inputs.built-wheel-names != '' + && format('--installpkg dist/{0}', inputs.built-wheel-names) + || '' + }} + --notest + - name: Initialize pre-commit envs if needed + if: inputs.toxenv == 'pre-commit' + run: >- + python -Im + tox + exec + --skip-pkg-install + --quiet + -- + python -Im pre_commit install-hooks + - name: >- + Run tox envs: `${{ env.TOXENV }}` + id: tox-run + run: >- + python -Im + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + --skip-pkg-install + --quiet + ${{ + inputs.tox-run-posargs != '' + && format('-- {0}', inputs.tox-run-posargs) + || '' + }} + - name: Produce markdown test summary from JUnit + if: >- + !cancelled() + && steps.tox-run.outputs.test-result-files != '' + uses: test-summary/action@v2.3 + with: + paths: >- + ${{ steps.tox-run.outputs.test-result-files }} + - name: Produce markdown test summary from Cobertura XML + # NOTE: MyPy is temporarily excluded because it produces incomplete XML + # NOTE: files that `irongut/CodeCoverageSummary` can't stomach. + # Refs: + # * https://github.com/irongut/CodeCoverageSummary/issues/324 + # * https://github.com/python/mypy/issues/17689 + # FIXME: Revert the exclusion once upstream fixes the bug. 
+ if: >- + !cancelled() + && runner.os == 'Linux' + && steps.tox-run.outputs.cov-report-files != '' + && steps.tox-run.outputs.test-result-files == '' + && steps.tox-run.outputs.codecov-flags != 'MyPy' + uses: irongut/CodeCoverageSummary@v1.3.0 + with: + badge: true + filename: >- + ${{ steps.tox-run.outputs.cov-report-files }} + format: markdown + output: both + # Ref: https://github.com/irongut/CodeCoverageSummary/issues/66 + - name: Append coverage results to Job Summary + if: >- + !cancelled() + && runner.os == 'Linux' + && steps.tox-run.outputs.cov-report-files != '' + && steps.tox-run.outputs.test-result-files == '' + && steps.tox-run.outputs.codecov-flags != 'MyPy' + run: >- + cat code-coverage-results.md >> "${GITHUB_STEP_SUMMARY}" + - name: Re-run the failing tests with maximum verbosity + if: >- + !cancelled() + && failure() + && inputs.tox-rerun-posargs != '' + run: >- # `exit 1` makes sure that the job remains red with flaky runs + python -Im + tox + --parallel auto + --parallel-live + --skip-missing-interpreters false + -vvvvv + --skip-pkg-install + -- + ${{ inputs.tox-rerun-posargs }} + && exit 1 + shell: bash + - name: Send coverage data to Codecov + if: >- + !cancelled() + && steps.tox-run.outputs.cov-report-files != '' + uses: codecov/codecov-action@v4 + with: + disable_search: true + fail_ci_if_error: >- + ${{ toJSON(inputs.upstream-repository-id == github.repository_id) }} + files: >- + ${{ steps.tox-run.outputs.cov-report-files }} + flags: >- + CI-GHA, + ${{ steps.tox-run.outputs.codecov-flags }}, + OS-${{ + runner.os + }}, + VM-${{ + inputs.runner-vm-os + }}, + Py-${{ + steps.python-install.outputs.python-version + }} + token: ${{ secrets.codecov-token }} + - name: Upload test results to Codecov + if: >- + !cancelled() + && steps.tox-run.outputs.test-result-files != '' + # FIXME: revert to v1 once Codecov releases v1.0.2 of their action. + # Ref: https://github.com/codecov/test-results-action/issues/108. + # uses: codecov/test-results-action@v1 + uses: codecov/test-results-action@4e79e65778be1cecd5df25e14af1eafb6df80ea9 + with: + disable_search: true + fail_ci_if_error: >- + ${{ toJSON(inputs.upstream-repository-id == github.repository_id) }} + files: >- + ${{ steps.tox-run.outputs.test-result-files }} + flags: >- + CI-GHA, + ${{ steps.tox-run.outputs.codecov-flags }}, + OS-${{ + runner.os + }}, + VM-${{ + inputs.runner-vm-os + }}, + Py-${{ + steps.python-install.outputs.python-version + }} + token: ${{ secrets.codecov-token }} + +... diff --git a/.github/workflows/scheduled-runs.yml b/.github/workflows/scheduled-runs.yml new file mode 100644 index 000000000..9d8403cf2 --- /dev/null +++ b/.github/workflows/scheduled-runs.yml @@ -0,0 +1,41 @@ +--- + +name: โณ + +on: + pull_request: + paths: # only changes to this workflow itself trigger PR testing + - .github/workflows/scheduled-runs.yml + schedule: + - cron: 3 5 * * * # run daily at 5:03 UTC + workflow_dispatch: # manual trigger + +run-name: >- + ๐ŸŒƒ + Nightly run of + ${{ + github.event.pull_request.number && 'PR' || '' + }}${{ + !github.event.pull_request.number && 'Commit' || '' + }} + ${{ github.event.pull_request.number || github.sha }} + triggered by: ${{ github.event_name }} of ${{ + github.ref + }} ${{ + github.ref_type + }} + (workflow run ID: ${{ + github.run_id + }}; number: ${{ + github.run_number + }}; attempt: ${{ + github.run_attempt + }}) + +jobs: + main-ci-cd-pipeline: + name: ๐Ÿงช Main CI/CD pipeline + uses: ./.github/workflows/ci-cd.yml + secrets: inherit + +... 
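The reusable workflow above publishes step outputs by appending `key=value` lines to the file that GitHub Actions names in `GITHUB_OUTPUT`, via inline `shell: python` steps. A minimal sketch of that pattern, assuming a hypothetical `set_step_output` helper and a local fallback filename that are not part of this change:

from os import environ
from pathlib import Path


def set_step_output(key: str, value: str) -> None:
    """Append a key=value pair to the GitHub Actions step-outputs file."""
    # Falls back to a local file when GITHUB_OUTPUT is unset; that fallback is
    # an assumption for local experimentation only, in CI the variable exists.
    output_path = Path(environ.get('GITHUB_OUTPUT', 'github-output.txt'))
    with output_path.open(mode='a', encoding='utf-8') as outputs_file:
        print(f'{key}={value}', file=outputs_file)


if __name__ == '__main__':
    set_step_output('py-hash-key', 'example-value')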
diff --git a/.mypy.ini b/.mypy.ini new file mode 100644 index 000000000..a92387a64 --- /dev/null +++ b/.mypy.ini @@ -0,0 +1,69 @@ +[mypy] +python_version = 3.9 +color_output = true +error_summary = true +# IMPORTANT: The file list MUST NOT have a trailing comma after the last entry. +# Ref: https://github.com/python/mypy/issues/11171#issuecomment-2567150548 +files = + src/, + tests/pytest/ + +check_untyped_defs = true + +disallow_any_explicit = true +disallow_any_expr = true +disallow_any_decorated = true +disallow_any_generics = true +disallow_any_unimported = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true + +enable_error_code = + ignore-without-code + +explicit_package_bases = true + +extra_checks = true + +follow_imports = normal + +ignore_missing_imports = false + +local_partial_types = true + +mypy_path = ${MYPY_CONFIG_FILE_DIR}/src:${MYPY_CONFIG_FILE_DIR}/_type_stubs + +namespace_packages = true + +no_implicit_reexport = true + +pretty = true + +show_column_numbers = true +show_error_code_links = true +show_error_codes = true +show_error_context = true +show_error_end = true + +# `strict` will pick up any future strictness-related settings: +strict = true +strict_equality = true +strict_optional = true + +warn_no_return = true +warn_redundant_casts = true +warn_return_any = true +warn_unused_configs = true +warn_unused_ignores = true + +[mypy-tests.*] +# crashes with some decorators like `@pytest.mark.parametrize`: +disallow_any_expr = false +# fails on `@hypothesis.given()`: +disallow_any_decorated = false + +[mypy-tests.pytest.*] +disable_error_code = attr-defined diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1db1d62b4..75a649ea1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,6 +35,65 @@ repos: args: ['--allow-missing-credentials'] - id: detect-private-key + # Non-modifying checks: + - id: name-tests-test + files: >- + ^tests/[^_].*\.py$ + +- repo: https://github.com/pre-commit/mirrors-mypy.git + rev: v1.14.1 + hooks: + - id: mypy + alias: mypy-py313 + name: MyPy, for Python 3.13 + additional_dependencies: + - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` + - pytest + - pytest-mock + args: + - --python-version=3.13 + - --any-exprs-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --html-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --linecount-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --linecoverage-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --lineprecision-report=.tox/.tmp/.test-results/mypy--py-3.13 + - --txt-report=.tox/.tmp/.test-results/mypy--py-3.13 + pass_filenames: false + - id: mypy + alias: mypy-py311 + name: MyPy, for Python 3.11 + additional_dependencies: + - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` + - pytest + - pytest-mock + args: + - --python-version=3.11 + - --any-exprs-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --html-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --linecount-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --linecoverage-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --lineprecision-report=.tox/.tmp/.test-results/mypy--py-3.11 + - --txt-report=.tox/.tmp/.test-results/mypy--py-3.11 + pass_filenames: false + - id: mypy + alias: mypy-py39 + name: MyPy, for Python 3.9 + 
additional_dependencies: + - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` + - pytest + - pytest-mock + args: + - --python-version=3.9 + - --any-exprs-report=.tox/.tmp/.test-results/mypy--py-3.9 + - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.9 + - --html-report=.tox/.tmp/.test-results/mypy--py-3.9 + - --linecount-report=.tox/.tmp/.test-results/mypy--py-3.9 + - --linecoverage-report=.tox/.tmp/.test-results/mypy--py-3.9 + - --lineprecision-report=.tox/.tmp/.test-results/mypy--py-3.9 + - --txt-report=.tox/.tmp/.test-results/mypy--py-3.9 + pass_filenames: false - repo: https://github.com/jumanjihouse/pre-commit-hooks rev: 3.0.0 diff --git a/README.md b/README.md index 164fb8cd9..6bfb50225 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,17 @@ # Collection of git hooks for Terraform to be used with [pre-commit framework](http://pre-commit.com/) [![Github tag](https://img.shields.io/github/tag/antonbabenko/pre-commit-terraform.svg)](https://github.com/antonbabenko/pre-commit-terraform/releases) ![maintenance status](https://img.shields.io/maintenance/yes/2024.svg) [![Help Contribute to Open Source](https://www.codetriage.com/antonbabenko/pre-commit-terraform/badges/users.svg)](https://www.codetriage.com/antonbabenko/pre-commit-terraform) +[![CI/CD Badge]][CI/CD] +[![Codecov Badge]][Codecov] + +[CI/CD Badge]: +https://github.com/antonbabenko/pre-commit-terraform/actions/workflows/ci-cd.yml/badge.svg?branch=master +[CI/CD]: +https://github.com/antonbabenko/pre-commit-terraform/actions/workflows/ci-cd.yml + +[Codecov Badge]: +https://codecov.io/gh/antonbabenko/pre-commit-terraform/branch/master/graph/badge.svg?flags[]=pytest +[Codecov]: https://app.codecov.io/gh/antonbabenko/pre-commit-terraform?flags[]=pytest [![SWUbanner](https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg)](https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md) diff --git a/hatch.toml b/hatch.toml index 053ff6bfb..9d3cd73ea 100644 --- a/hatch.toml +++ b/hatch.toml @@ -1,6 +1,11 @@ [build.targets.sdist] include = [ + '.codecov.yml', + '.coveragerc', 'src/', + 'tests/', + 'pytest.ini', + 'tox.ini', ] [build.targets.wheel] diff --git a/pyproject.toml b/pyproject.toml index e5ff0159a..c8ae73136 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,15 +5,36 @@ requires = [ ] build-backend = 'hatchling.build' +[dependency-groups] +building = [ + 'build', +] +linting = [ + 'pre-commit', +] +testing = [ + 'covdefaults', # sets up `coveragepy` config boilerplate + 'pytest >= 8', + 'pytest-cov', # integrates `coveragepy` into pytest runs + 'pytest-mock', # provides a `mocker` fixture + 'pytest-xdist', # paralellizes tests through subprocesses +] +upstreaming = [ + 'twine', +] + [project] name = 'pre-commit-terraform' classifiers = [ 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', + 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ] @@ -23,6 +44,7 @@ dynamic = [ 'urls', 'version', ] +requires-python = ">= 3.9" [[project.authors]] name = 'Anton Babenko' 
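The `[dependency-groups]` table added to `pyproject.toml` above follows PEP 735, and the `tox.ini` introduced later in this diff consumes the same groups through its `dependency_groups =` settings. A minimal sketch of reading one group programmatically, assuming the script sits next to `pyproject.toml`; the helper name is illustrative, and `tomllib` is stdlib only on Python 3.11+ (the third-party `tomli` backport is the usual stand-in on 3.9/3.10):

import tomllib  # stdlib on Python 3.11+; use the `tomli` backport on 3.9/3.10
from pathlib import Path


def read_dependency_group(group: str) -> list:
    """Return the raw entries declared for one PEP 735 dependency group."""
    with Path('pyproject.toml').open('rb') as toml_file:
        data = tomllib.load(toml_file)
    # Nested `{include-group = ...}` entries are returned as-is, not expanded;
    # the groups declared in this change only contain plain requirement strings.
    return data.get('dependency-groups', {}).get(group, [])


if __name__ == '__main__':
    print(read_dependency_group('testing'))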
diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..d16390e6f --- /dev/null +++ b/pytest.ini @@ -0,0 +1,84 @@ +[pytest] +addopts = + # `pytest-xdist`: + --numprocesses=auto + # NOTE: the plugin is disabled because it's slower with so few tests + --numprocesses=0 + + # Show 10 slowest invocations: + --durations=10 + + # Report all the things == -rxXs: + -ra + + # Show values of the local vars in errors/tracebacks: + --showlocals + + # Autocollect and invoke the doctests from all modules: + # https://docs.pytest.org/en/stable/doctest.html + --doctest-modules + + # Pre-load the `pytest-cov` plugin early: + -p pytest_cov + + # `pytest-cov`: + --cov + --cov-config=.coveragerc + --cov-context=test + --no-cov-on-fail + + # Fail on config parsing warnings: + # --strict-config + + # Fail on non-existing markers: + # * Deprecated since v6.2.0 but may be reintroduced later covering a + # broader scope: + # --strict + # * Exists since v4.5.0 (advised to be used instead of `--strict`): + --strict-markers + +doctest_optionflags = ALLOW_UNICODE ELLIPSIS + +# Marks tests with an empty parameterset as xfail(run=False) +empty_parameter_set_mark = xfail + +faulthandler_timeout = 30 + +filterwarnings = + error + +# https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files +junit_duration_report = call +# xunit1 contains more metadata than xunit2 so it's better for CI UIs: +junit_family = xunit1 +junit_logging = all +junit_log_passing_tests = true +junit_suite_name = awx_plugins_test_suite + +# A mapping of markers to their descriptions allowed in strict mode: +markers = + +minversion = 6.1.0 + +# Optimize pytest's lookup by restricting potentially deep dir tree scan: +norecursedirs = + build + dependencies + dist + docs + .cache + .eggs + .git + .github + .tox + *.egg + *.egg-info + */*.egg-info + */**/*.egg-info + *.dist-info + */*.dist-info + */**/*.dist-info + +testpaths = tests/pytest/ + +xfail_strict = true diff --git a/src/pre_commit_terraform/_cli.py b/src/pre_commit_terraform/_cli.py index f52a50b0b..edcb2ea30 100644 --- a/src/pre_commit_terraform/_cli.py +++ b/src/pre_commit_terraform/_cli.py @@ -1,6 +1,7 @@ """Outer CLI layer of the app interface.""" -from sys import stderr +import sys +from typing import cast as cast_to from ._cli_parsing import initialize_argument_parser from ._errors import ( @@ -9,7 +10,7 @@ PreCommitTerraformRuntimeError, ) from ._structs import ReturnCode -from ._types import ReturnCodeType +from ._types import CLIAppEntryPointCallableType, ReturnCodeType def invoke_cli_app(cli_args: list[str]) -> ReturnCodeType: @@ -20,29 +21,34 @@ def invoke_cli_app(cli_args: list[str]) -> ReturnCodeType: """ root_cli_parser = initialize_argument_parser() parsed_cli_args = root_cli_parser.parse_args(cli_args) + invoke_cli_app = cast_to( + # FIXME: attempt typing per https://stackoverflow.com/a/75666611/595220 + CLIAppEntryPointCallableType, + parsed_cli_args.invoke_cli_app, + ) try: - return parsed_cli_args.invoke_cli_app(parsed_cli_args) + return invoke_cli_app(parsed_cli_args) except PreCommitTerraformExit as exit_err: - print(f'App exiting: {exit_err !s}', file=stderr) + print(f'App exiting: {exit_err !s}', file=sys.stderr) raise except PreCommitTerraformRuntimeError as unhandled_exc: print( f'App execution took an unexpected turn: {unhandled_exc !s}. ' 'Exiting...', - file=stderr, + file=sys.stderr, ) return ReturnCode.ERROR except PreCommitTerraformBaseError as unhandled_exc: print( f'A surprising exception happened: {unhandled_exc !s}. 
Exiting...', - file=stderr, + file=sys.stderr, ) return ReturnCode.ERROR except KeyboardInterrupt as ctrl_c_exc: print( f'User-initiated interrupt: {ctrl_c_exc !s}. Exiting...', - file=stderr, + file=sys.stderr, ) return ReturnCode.ERROR diff --git a/src/pre_commit_terraform/_types.py b/src/pre_commit_terraform/_types.py index 99402b447..78db357e7 100644 --- a/src/pre_commit_terraform/_types.py +++ b/src/pre_commit_terraform/_types.py @@ -1,18 +1,20 @@ """Composite types for annotating in-project code.""" from argparse import ArgumentParser, Namespace -from typing import Final, Protocol +from collections.abc import Callable +from typing import Protocol, Union from ._structs import ReturnCode -ReturnCodeType = ReturnCode | int +ReturnCodeType = Union[ReturnCode, int] # Union instead of pipe for Python 3.9 +CLIAppEntryPointCallableType = Callable[[Namespace], ReturnCodeType] class CLISubcommandModuleProtocol(Protocol): """A protocol for the subcommand-implementing module shape.""" - CLI_SUBCOMMAND_NAME: Final[str] + CLI_SUBCOMMAND_NAME: str """This constant contains a CLI.""" def populate_argument_parser( @@ -20,9 +22,7 @@ def populate_argument_parser( ) -> None: """Run a module hook for populating the subcommand parser.""" - def invoke_cli_app( - self, parsed_cli_args: Namespace, - ) -> ReturnCodeType | int: + def invoke_cli_app(self, parsed_cli_args: Namespace) -> ReturnCodeType: """Run a module hook implementing the subcommand logic.""" ... # pylint: disable=unnecessary-ellipsis diff --git a/src/pre_commit_terraform/terraform_docs_replace.py b/src/pre_commit_terraform/terraform_docs_replace.py index b79ba479e..cc83a2a7d 100644 --- a/src/pre_commit_terraform/terraform_docs_replace.py +++ b/src/pre_commit_terraform/terraform_docs_replace.py @@ -2,13 +2,13 @@ import subprocess import warnings from argparse import ArgumentParser, Namespace -from typing import Final +from typing import cast as cast_to from ._structs import ReturnCode from ._types import ReturnCodeType -CLI_SUBCOMMAND_NAME: Final[str] = 'replace-docs' +CLI_SUBCOMMAND_NAME: str = 'replace-docs' def populate_argument_parser(subcommand_parser: ArgumentParser) -> None: @@ -50,7 +50,7 @@ def invoke_cli_app(parsed_cli_args: Namespace) -> ReturnCodeType: ) dirs = [] - for filename in parsed_cli_args.filenames: + for filename in cast_to(list[str], parsed_cli_args.filenames): if (os.path.realpath(filename) not in dirs and (filename.endswith(".tf") or filename.endswith(".tfvars"))): dirs.append(os.path.dirname(filename)) @@ -61,14 +61,14 @@ def invoke_cli_app(parsed_cli_args: Namespace) -> ReturnCodeType: try: procArgs = [] procArgs.append('terraform-docs') - if parsed_cli_args.sort: + if cast_to(bool, parsed_cli_args.sort): procArgs.append('--sort-by-required') procArgs.append('md') procArgs.append("./{dir}".format(dir=dir)) procArgs.append('>') procArgs.append( './{dir}/{dest}'. 
- format(dir=dir, dest=parsed_cli_args.dest), + format(dir=dir, dest=cast_to(bool, parsed_cli_args.dest)), ) subprocess.check_call(" ".join(procArgs), shell=True) except subprocess.CalledProcessError as e: diff --git a/tests/pytest/_cli_test.py b/tests/pytest/_cli_test.py new file mode 100644 index 000000000..52ea82ab6 --- /dev/null +++ b/tests/pytest/_cli_test.py @@ -0,0 +1,100 @@ +"""Tests for the high-level CLI entry point.""" + +from argparse import ArgumentParser, Namespace +import pytest + +from pre_commit_terraform import _cli_parsing as _cli_parsing_mod +from pre_commit_terraform._cli import invoke_cli_app +from pre_commit_terraform._errors import ( + PreCommitTerraformExit, + PreCommitTerraformBaseError, + PreCommitTerraformRuntimeError, +) +from pre_commit_terraform._structs import ReturnCode +from pre_commit_terraform._types import ReturnCodeType + + +pytestmark = pytest.mark.filterwarnings( + 'ignore:`terraform_docs_replace` hook is DEPRECATED.:UserWarning:' + 'pre_commit_terraform.terraform_docs_replace', +) + + +@pytest.mark.parametrize( + ('raised_error', 'expected_stderr'), + ( + # pytest.param(PreCommitTerraformExit('sentinel'), 'App exiting: sentinel', id='app-exit'), + pytest.param( + PreCommitTerraformRuntimeError('sentinel'), + 'App execution took an unexpected turn: sentinel. Exiting...', + id='app-runtime-exc', + ), + pytest.param( + PreCommitTerraformBaseError('sentinel'), + 'A surprising exception happened: sentinel. Exiting...', + id='app-base-exc', + ), + pytest.param( + KeyboardInterrupt('sentinel'), + 'User-initiated interrupt: sentinel. Exiting...', + id='ctrl-c', + ), + ), +) +def test_known_interrupts( + capsys: pytest.CaptureFixture[str], + expected_stderr: str, + monkeypatch: pytest.MonkeyPatch, + raised_error: BaseException, +) -> None: + """Check that known interrupts are turned into return code 1.""" + class CustomCmdStub: + CLI_SUBCOMMAND_NAME = 'sentinel' + + def populate_argument_parser( + self, subcommand_parser: ArgumentParser, + ) -> None: + return None + + def invoke_cli_app(self, parsed_cli_args: Namespace) -> ReturnCodeType: + raise raised_error + + monkeypatch.setattr( + _cli_parsing_mod, + 'SUBCOMMAND_MODULES', + [CustomCmdStub()], + ) + + assert ReturnCode.ERROR == invoke_cli_app(['sentinel']) + + captured_outputs = capsys.readouterr() + assert captured_outputs.err == f'{expected_stderr !s}\n' + + +def test_app_exit( + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Check that an exit exception is re-raised.""" + class CustomCmdStub: + CLI_SUBCOMMAND_NAME = 'sentinel' + + def populate_argument_parser( + self, subcommand_parser: ArgumentParser, + ) -> None: + return None + + def invoke_cli_app(self, parsed_cli_args: Namespace) -> ReturnCodeType: + raise PreCommitTerraformExit('sentinel') + + monkeypatch.setattr( + _cli_parsing_mod, + 'SUBCOMMAND_MODULES', + [CustomCmdStub()], + ) + + with pytest.raises(PreCommitTerraformExit, match='^sentinel$'): + invoke_cli_app(['sentinel']) + + captured_outputs = capsys.readouterr() + assert captured_outputs.err == 'App exiting: sentinel\n' diff --git a/tests/pytest/terraform_docs_replace_test.py b/tests/pytest/terraform_docs_replace_test.py new file mode 100644 index 000000000..87989d965 --- /dev/null +++ b/tests/pytest/terraform_docs_replace_test.py @@ -0,0 +1,123 @@ +"""Tests for the `replace-docs` subcommand.""" + +from argparse import ArgumentParser, Namespace +from subprocess import CalledProcessError + +import pytest +import pytest_mock + +from 
pre_commit_terraform._structs import ReturnCode +from pre_commit_terraform.terraform_docs_replace import ( + invoke_cli_app, + populate_argument_parser, + subprocess as replace_docs_subprocess_mod, +) + + +def test_arg_parser_populated() -> None: + """Check that `replace-docs` populates its parser.""" + test_arg_parser = ArgumentParser() + populate_argument_parser(test_arg_parser) + assert test_arg_parser.get_default('dest') == 'README.md' + + +def test_check_is_deprecated() -> None: + """Verify that `replace-docs` shows a deprecation warning.""" + deprecation_msg_regex = ( + r'^`terraform_docs_replace` hook is DEPRECATED\.' + 'For migration.*$' + ) + with pytest.warns(UserWarning, match=deprecation_msg_regex): + # not `pytest.deprecated_call()` due to this being a user warning + invoke_cli_app(Namespace(filenames=[])) + + +@pytest.mark.parametrize( + ('parsed_cli_args', 'expected_cmds'), + ( + pytest.param(Namespace(filenames=[]), [], id='no-files'), + pytest.param( + Namespace( + dest='SENTINEL.md', + filenames=['some.tf'], + sort=False, + ), + ['terraform-docs md ./ > .//SENTINEL.md'], + id='one-file', + ), + pytest.param( + Namespace( + dest='SENTINEL.md', + filenames=['some.tf', 'thing/weird.tfvars'], + sort=True, + ), + [ + 'terraform-docs --sort-by-required md ./ > .//SENTINEL.md', + 'terraform-docs --sort-by-required md ./thing ' + '> ./thing/SENTINEL.md', + ], + id='two-sorted-files', + ), + pytest.param( + Namespace(filenames=['some.thing', 'un.supported']), + [], + id='invalid-files', + ), + ), +) +@pytest.mark.filterwarnings( + 'ignore:`terraform_docs_replace` hook is DEPRECATED.:UserWarning:' + 'pre_commit_terraform.terraform_docs_replace', +) +def test_control_flow_positive( + expected_cmds: list[str], + mocker: pytest_mock.MockerFixture, + monkeypatch: pytest.MonkeyPatch, + parsed_cli_args: Namespace, +) -> None: + """Check that the subcommand's happy path works.""" + check_call_mock = mocker.Mock() + monkeypatch.setattr( + replace_docs_subprocess_mod, + 'check_call', + check_call_mock, + ) + + assert ReturnCode.OK == invoke_cli_app(parsed_cli_args) + + executed_commands = [ + cmd for ((cmd, ), _shell) in check_call_mock.call_args_list + ] + + assert len(expected_cmds) == check_call_mock.call_count + assert expected_cmds == executed_commands + + +@pytest.mark.filterwarnings( + 'ignore:`terraform_docs_replace` hook is DEPRECATED.:UserWarning:' + 'pre_commit_terraform.terraform_docs_replace', +) +def test_control_flow_negative( + mocker: pytest_mock.MockerFixture, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Check that the subcommand's error processing works.""" + parsed_cli_args = Namespace( + dest='SENTINEL.md', + filenames=['some.tf'], + sort=True, + ) + expected_cmd = 'terraform-docs --sort-by-required md ./ > .//SENTINEL.md' + + check_call_mock = mocker.Mock( + side_effect=CalledProcessError(ReturnCode.ERROR, expected_cmd), + ) + monkeypatch.setattr( + replace_docs_subprocess_mod, + 'check_call', + check_call_mock, + ) + + assert ReturnCode.ERROR == invoke_cli_app(parsed_cli_args) + + check_call_mock.assert_called_once_with(expected_cmd, shell=True) diff --git a/tox.ini b/tox.ini new file mode 100644 index 000000000..195db1536 --- /dev/null +++ b/tox.ini @@ -0,0 +1,286 @@ +[tox] +isolated_build = true + + +[python-cli-options] +byte-warnings = -b +byte-errors = -bb +max-isolation = -E -s -I +some-isolation = -E -s +warnings-to-errors = -Werror + + +[testenv] +description = Run pytest under {envpython} +dependency_groups = + testing +commands = + {envpython} \ + 
{[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m pytest \ + {tty:--color=yes} \ + {posargs:--cov-report=html:{envtmpdir}{/}htmlcov{/}} +commands_post = + -{envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import atexit, os, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + import coverage; \ + gh_summary_fd = open(\ + os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a",\ + ); \ + atexit.register(gh_summary_fd.close); \ + cov = coverage.Coverage(); \ + cov.load(); \ + cov.report(file=gh_summary_fd, output_format="markdown")' + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + cov_report_arg_prefix = "--cov-report=xml:"; \ + test_report_arg_prefix = "--junitxml="; \ + cov_reports = [\ + arg[len(cov_report_arg_prefix):] for arg in sys.argv \ + if arg.startswith(cov_report_arg_prefix)\ + ]; \ + test_reports = [\ + arg[len(test_report_arg_prefix):] for arg in sys.argv \ + if arg.startswith(test_report_arg_prefix)\ + ]; \ + cov_report_file = cov_reports[-1] if cov_reports else None; \ + test_report_file = test_reports[-1] if test_reports else None; \ + gh_output_fd = open(\ + os.environ["GITHUB_OUTPUT"], encoding="utf-8", mode="a",\ + ); \ + cov_report_file and \ + print(f"cov-report-files={cov_report_file !s}", file=gh_output_fd); \ + test_report_file and \ + print(f"test-result-files={test_report_file !s}", file=gh_output_fd); \ + print("codecov-flags=pytest", file=gh_output_fd); \ + gh_output_fd.close()' \ + {posargs} + # Print out the output coverage dir and a way to serve html: + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c\ + 'import pathlib, shlex, sys; \ + cov_html_report_arg_prefix = "--cov-report=html:"; \ + cov_html_reports = [\ + arg[len(cov_html_report_arg_prefix):] for arg in sys.argv \ + if arg.startswith(cov_html_report_arg_prefix)\ + ]; \ + cov_html_reports or sys.exit(); \ + cov_html_report_dir = pathlib.Path(cov_html_reports[-1]); \ + index_file = cov_html_report_dir / "index.html";\ + html_url = f"file://\{index_file\}";\ + browse_cmd = shlex.join(("python3", "-Im", "webbrowser", html_url)); \ + serve_cmd = shlex.join((\ + "python3", "-Im", "http.server", \ + "--directory", "cov_html_report_dir", "0", \ + )); \ + print(f"\nTo open the HTML coverage report, run\n\n\ + \t\{browse_cmd !s\}\n");\ + print(f"To serve \ + the HTML coverage report with a local web server, use\n\n\ + \t\{serve_cmd !s\}\n")' \ + {posargs:--cov-report=html:{envtmpdir}{/}htmlcov{/}} +package = editable +pass_env = + CI + GITHUB_* + SSH_AUTH_SOCK + TERM +set_env = + COVERAGE_PROCESS_START = {toxinidir}{/}.coveragerc +wheel_build_env = .pkg + + +[testenv:cleanup-dists] +description = + Wipe the the dist{/} folder +dependency_groups = +commands_pre = +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, shutil, sys; \ + dists_dir = "{toxinidir}{/}dist{/}"; \ + shutil.rmtree(dists_dir, ignore_errors=True); \ + sys.exit(os.path.exists(dists_dir))' +commands_post = +package = skip + + +[testenv:build-dists] +description = + 
Build dists with {basepython} and put them into the dist{/} folder +dependency_groups = + building +depends = + cleanup-dists +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m build \ + {posargs:} +commands_post = +package = skip + + +[testenv:metadata-validation] +description = + Verify that dists under the `dist{/}` dir + have valid metadata +dependency_groups = + upstreaming +depends = + build-dists +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m twine \ + check \ + --strict \ + dist{/}* +commands_post = +package = skip + + +[testenv:pre-commit] +description = + Run the quality checks under {basepython}; run as + `SKIP=check-id1,check-id2 tox r -e pre-commit` to instruct the underlying + `pre-commit` invocation avoid running said checks; Use + `tox r -e pre-commit -- check-id1 --all-files` to select checks matching IDs + aliases{:} `tox r -e pre-commit -- mypy --all-files` will run 3 MyPy + invocations, but `tox r -e pre-commit -- mypy-py313 --all-files` runs one. +commands = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -m pre_commit \ + run \ + --color=always \ + --show-diff-on-failure \ + {posargs:--all-files} + + # Print out the advice on how to install pre-commit from this env into Git: + -{envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'cmd = "{envpython} -m pre_commit install"; \ + scr_width = len(cmd) + 10; \ + sep = "=" * scr_width; \ + cmd_str = " $ \{cmd\}";' \ + 'print(f"\n\{sep\}\nTo install pre-commit hooks into the Git repo, run:\ + \n\n\{cmd_str\}\n\n\{sep\}\n")' +commands_post = + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import os, pathlib, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + project_root_path = pathlib.Path(r"{toxinidir}"); \ + test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ + coverage_result_files = ",".join(\ + str(xml_path.relative_to(project_root_path)) \ + for xml_path in test_results_dir.glob("mypy--py-*{/}cobertura.xml")\ + ); \ + gh_output_fd = open(\ + os.environ["GITHUB_OUTPUT"], encoding="utf-8", mode="a",\ + ); \ + print(\ + f"cov-report-files={coverage_result_files !s}", file=gh_output_fd\ + ); \ + print("codecov-flags=MyPy", file=gh_output_fd); \ + gh_output_fd.close()' + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c \ + 'import itertools, os, pathlib, shlex, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ + test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ + text_and_json_reports = itertools.chain( \ + test_results_dir.glob("mypy--py-*{/}*.json"), \ + test_results_dir.glob("mypy--py-*{/}*.txt"), \ + ); \ + report_contents = { \ + report{:} report.read_text() \ + for report in text_and_json_reports \ + }; \ + reports_summary_text_blob = "\n\n".join( \ + f"\N\{NUMBER SIGN\}\N\{NUMBER SIGN\} {report_path.parent.name}{:} " \ + f"`{report_path.name}`\n\n" \ + f"```{report_path.suffix[1:]}\n{report_text}\n```\n" \ + for report_path, report_text in report_contents.items() \ + ); \ 
+ gh_summary_fd = open( \ + os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", mode="a", \ + ); \ + print(reports_summary_text_blob, file=gh_summary_fd); \ + gh_summary_fd.close()' + # Print out the output coverage dir and a way to serve html: + {envpython} \ + {[python-cli-options]byte-errors} \ + {[python-cli-options]max-isolation} \ + {[python-cli-options]warnings-to-errors} \ + -c\ + 'import os, pathlib, sys; \ + os.getenv("GITHUB_ACTIONS") == "true" and sys.exit(); \ + len(sys.argv) >= 3 and all(\ + arg != "mypy" and not arg.startswith("mypy-py3") \ + for arg in sys.argv \ + ) and sys.exit(); \ + project_root_path = pathlib.Path(r"{toxinidir}"); \ + test_results_dir = pathlib.Path(r"{temp_dir}") / ".test-results"; \ + coverage_html_report_urls = [\ + f"file://\{xml_path !s\}" \ + for xml_path in test_results_dir.glob("mypy--py-*{/}index.html")\ + ]; \ + coverage_html_report_open_cmds = [\ + f"python3 -Im webbrowser \N\{QUOTATION MARK\}\{html_url !s\}\N\{QUOTATION MARK\}" \ + for html_url in coverage_html_report_urls\ + ]; \ + coverage_html_report_open_cmds_blob = "\n\n\t".join(\ + coverage_html_report_open_cmds,\ + ); \ + print(\ + f"\nTo open the HTML coverage reports, run\n\n\ + \t\{coverage_html_report_open_cmds_blob !s\}\n"\ + ); \ + print(\ + f"[*] Find rest of JSON and text reports, are in the same directories."\ + )\ + ' \ + {posargs:--all-files} +dependency_groups = + linting +isolated_build = true +package = skip +pass_env = + {[testenv]pass_env} + SKIP # set this variable