Skip to content

add CI to detect performance regressions #53

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Oct 4, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
135 changes: 135 additions & 0 deletions .github/workflows/perf-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
name: Performance Regression

# Run the benchmark whenever the Rust sources (or the benchmark tooling
# itself) change on main or in a pull request targeting main.
on:
  push:
    branches: [main]
    paths:
      - cpp-linter/src/**
      - cpp-linter/Cargo.toml
      - Cargo.toml
      - Cargo.lock
      - .github/workflows/perf-test.yml
      # Fixed: the annotation helper added alongside this workflow is
      # perf_annotate.py; the previous entry (bench.py) does not exist,
      # so changes to the helper never re-triggered this workflow on push.
      - .github/workflows/perf_annotate.py
    tags-ignore: ['*']
  pull_request:
    branches: [main]
    paths:
      - cpp-linter/src/**
      - cpp-linter/Cargo.toml
      - Cargo.toml
      - Cargo.lock
      # Glob covers both perf-test.yml and perf_annotate.py.
      - .github/workflows/perf*
jobs:
  build:
    # Compile the cpp-linter release binary twice -- once for the commit under
    # test ("current") and once for the comparison baseline ("previous") -- so
    # the benchmark job can time the two builds against each other.
    name: Build ${{ matrix.name }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - commit: ${{ github.sha }}
            name: current
          # Baseline: the PR base branch tip for pull requests, otherwise the
          # commit HEAD pointed at before the push (github.event.before).
          - commit: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
            name: previous
    outputs:
      # 'true' when the corresponding binary was restored from cache; the
      # downstream jobs use these to skip benchmarking when nothing changed.
      cached-previous: ${{ steps.is-cached-previous.outputs.is-cached }}
      cached-current: ${{ steps.is-cached-current.outputs.is-cached }}
    steps:
      - name: Checkout ${{ matrix.name }}
        uses: actions/checkout@v4
        with:
          ref: ${{ matrix.commit }}
      - name: Cache base ref build
        uses: actions/cache@v4
        id: cache
        with:
          # Keyed on the Rust sources and manifests of the checked-out commit,
          # so commits with identical sources share one cached binary.
          key: bin-cache-${{ hashFiles('cpp-linter/src/**', 'Cargo.toml', 'Cargo.lock', 'cpp-linter/Cargo.toml') }}
          path: target/release/cpp-linter
      # Exactly one of the two matrix legs sets each cache-status output.
      - name: Is previous cached?
        if: matrix.name == 'previous'
        id: is-cached-previous
        run: echo "is-cached=${{ steps.cache.outputs.cache-hit }}" >> $GITHUB_OUTPUT
      - name: Is current cached?
        if: matrix.name == 'current'
        id: is-cached-current
        run: echo "is-cached=${{ steps.cache.outputs.cache-hit }}" >> $GITHUB_OUTPUT
      # Skip the toolchain update and the build entirely on a cache hit; the
      # binary restored to target/release/cpp-linter is uploaded as-is below.
      - run: rustup update --no-self-update
        if: steps.cache.outputs.cache-hit != 'true'
      - run: cargo build --bin cpp-linter --release
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Upload build artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.name }}
          path: target/release/cpp-linter

  benchmark:
    name: Measure Performance Difference
    needs: [build]
    # Only benchmark when at least one of the two binaries was actually
    # rebuilt (i.e. the relevant sources differ between the two commits).
    if: ${{ needs.build.outputs.cached-current != 'true' || needs.build.outputs.cached-previous != 'true' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # libgit2 at a pinned tag provides a large, stable C codebase to lint
      # as a reproducible workload.
      - name: Checkout libgit2
        uses: actions/checkout@v4
        with:
          repository: libgit2/libgit2
          ref: v1.8.1
          path: libgit2
      # No name/path filter: downloads every artifact from the build job,
      # yielding ./previous/cpp-linter and ./current/cpp-linter.
      - name: Download built binaries
        uses: actions/download-artifact@v4
      - name: Make binaries executable
        run: chmod +x ./*/cpp-linter
      - name: Generate compilation database
        working-directory: libgit2
        run: |
          mkdir build && cd build
          cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON ..
      - name: Install cargo-binstall
        uses: cargo-bins/cargo-binstall@main
      - name: Install hyperfine
        run: cargo binstall -y hyperfine
      # The pure-python cpp-linter (< 2.0) serves as a third reference point
      # in the comparison below.
      - uses: actions/setup-python@v5
        with:
          python-version: 3.x
      - run: pip install 'cpp-linter < 2.0'
      - name: Warmup and list files
        env:
          CPP_LINTER_COLOR: true
        working-directory: libgit2
        # Use previous build for stability. This will
        # - create the .cpp-linter_cache folder
        # - list the files concerning the benchmark test
        # NOTE: This does not actually invoke clang tools.
        run: ../previous/cpp-linter -l 0 -p build -i='|!src/libgit2' -s="" -c="-*" -e c
      - name: Run hyperfine tool
        # using the generated compilation database,
        # we will use cpp-linter (both builds) to scan libgit2 src/libgit2/**.c files.
        # The three --command-name labels below must match the order asserted
        # in perf_annotate.py.
        working-directory: libgit2
        run: >-
          hyperfine
          --runs 2
          --style color
          --export-markdown '${{ runner.temp }}/benchmark.md'
          --export-json '${{ runner.temp }}/benchmark.json'
          --command-name=previous-build
          "../previous/cpp-linter -l 0 -p build -i='|!src/libgit2' -e c"
          --command-name=current-build
          "../current/cpp-linter -l 0 -p build -i='|!src/libgit2' -e c"
          --command-name=pure-python
          "cpp-linter -l false -j 0 -p build -i='|!src/libgit2' -e c"
      - name: Append report to job summary
        run: cat ${{ runner.temp }}/benchmark.md >> $GITHUB_STEP_SUMMARY
      - name: Upload JSON results
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-json
          path: ${{ runner.temp }}/benchmark.json
      # perf_annotate.py classifies the run as regression/improvement/noise
      # and appends a markdown alert to the job summary.
      - name: Annotate summary
        run: python .github/workflows/perf_annotate.py "${{ runner.temp }}/benchmark.json"

  report-no-src-changes:
    runs-on: ubuntu-latest
    needs: [build]
    # Both binaries were restored from cache, meaning the relevant sources are
    # identical between the two commits -- nothing meaningful to benchmark, so
    # just leave a workflow notice explaining why no numbers were produced.
    if: needs.build.outputs.cached-current == 'true' && needs.build.outputs.cached-previous == 'true'
    steps:
      - run: echo "::notice title=No benchmark performed::No changes to cpp-linter source code detected."
64 changes: 64 additions & 0 deletions .github/workflows/perf_annotate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import argparse
import json
from os import environ
from pathlib import Path
from typing import List, Any, Dict


class Args(argparse.Namespace):
    """Typed ``argparse`` namespace declaring the expected CLI arguments."""

    # Path to the hyperfine-generated JSON benchmark report.
    json_file: Path


def main():
    """Read a hyperfine JSON report and emit a GitHub-flavored markdown alert.

    Expects the report to contain exactly three results, in order:
    ``previous-build``, ``current-build``, and ``pure-python`` (the command
    names used by the workflow's hyperfine invocation).  Compares the mean
    runtimes of the two Rust builds and classifies the delta as a regression
    (> 2 s slower), an improvement (> 2 s faster), or negligible.  The
    annotation is appended to ``$GITHUB_STEP_SUMMARY`` when that env var is
    set, otherwise printed to stdout.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("json_file", type=Path)
    # Use the returned namespace instance rather than mutating a class via
    # `namespace=Args` -- avoids global mutable state on the Args class.
    args = arg_parser.parse_args()

    bench_json = args.json_file.read_text(encoding="utf-8")
    bench: List[Dict[str, Any]] = json.loads(bench_json)["results"]

    # Fail loudly if the report shape drifts from the workflow's invocation.
    assert len(bench) == 3
    assert bench[0]["command"] == "previous-build"
    assert bench[1]["command"] == "current-build"
    assert bench[2]["command"] == "pure-python"

    old_mean: float = bench[0]["mean"]
    new_mean: float = bench[1]["mean"]

    diff = round(new_mean - old_mean, 2)
    # Round AFTER scaling to a percentage so two decimal places survive;
    # rounding the raw ratio first would quantize to whole percents.
    scalar = round(new_mean / old_mean * 100, 2)

    output = []
    if diff > 2:
        output.extend(
            [
                "> [!CAUTION]",
                "> Detected a performance regression in new changes:",
            ]
        )
    elif diff < -2:
        output.extend(
            [
                "> [!TIP]",
                "> Detected a performance improvement in new changes:",
            ]
        )
    else:
        output.extend(
            [
                "> [!NOTE]",
                "> Determined a negligible difference in performance with new changes:",
            ]
        )
    output[-1] += f" {diff}s ({scalar} %)"
    annotation = "\n".join(output)

    if "GITHUB_STEP_SUMMARY" in environ:
        # Append (not overwrite): the summary already holds the markdown table.
        with open(environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as summary:
            summary.write(f"\n{annotation}\n")
    else:
        print(annotation)


if __name__ == "__main__":
    main()
Loading