Make it easier to benchmark and profile the code (badges#4780)
* Make it easier to benchmark and profile the code

* Remove unnecessary escape

* Clarify that the backend server is started without the frontend

* Add missing NODE_CONFIG_ENV environment variable

* Add error message when user has not included console.time statements

* Fix lint issue

* Handle multiple console.time statements

* Switch NODE_CONFIG_ENV to test

* Switch to const as variable never re-assigned
PyvesB authored Mar 22, 2020
1 parent a2c35a6 commit 097bf87
Showing 11 changed files with 138 additions and 32 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -113,3 +113,6 @@ service-definitions.yml

# Rendered API docs
/api-docs/

# Flamebearer
flamegraph.html
3 changes: 0 additions & 3 deletions config/custom-environment-variables.yml
@@ -48,9 +48,6 @@ public:
authorizedOrigins: 'TEAMCITY_ORIGINS'
trace: 'TRACE_SERVICES'

profiling:
makeBadge: 'PROFILE_MAKE_BADGE'

cacheHeaders:
defaultCacheLengthSeconds: 'BADGE_MAX_AGE_SECONDS'

3 changes: 0 additions & 3 deletions config/default.yml
@@ -23,9 +23,6 @@ public:
intervalSeconds: 200
trace: false

profiling:
makeBadge: false

cacheHeaders:
defaultCacheLengthSeconds: 120

12 changes: 1 addition & 11 deletions core/base-service/base-static.js
@@ -13,9 +13,6 @@ const { prepareRoute, namedParamsForMatch } = require('./route')

module.exports = class BaseStaticService extends BaseService {
static register({ camp, metricInstance }, serviceConfig) {
const {
profiling: { makeBadge: shouldProfileMakeBadge },
} = serviceConfig
const { regex, captureNames } = prepareRoute(this.route)

const metricHelper = MetricHelper.create({
@@ -52,16 +49,9 @@ module.exports = class BaseStaticService extends BaseService {
const format = (match.slice(-1)[0] || '.svg').replace(/^\./, '')
badgeData.format = format

if (shouldProfileMakeBadge) {
console.time('makeBadge total')
}
const svg = makeBadge(badgeData)
if (shouldProfileMakeBadge) {
console.timeEnd('makeBadge total')
}

setCacheHeadersForStaticResource(ask.res)

const svg = makeBadge(badgeData)
makeSend(format, ask.res, end)(svg)

metricHandle.noteResponseSent()
4 changes: 0 additions & 4 deletions core/server/server.js
@@ -123,9 +123,6 @@ const publicConfigSchema = Joi.object({
teamcity: defaultService,
trace: Joi.boolean().required(),
}).required(),
profiling: {
makeBadge: Joi.boolean().required(),
},
cacheHeaders: {
defaultCacheLengthSeconds: Joi.number()
.integer()
@@ -341,7 +338,6 @@ class Server {
{
handleInternalErrors: config.public.handleInternalErrors,
cacheHeaders: config.public.cacheHeaders,
profiling: config.public.profiling,
fetchLimitBytes: bytes(config.public.fetchLimit),
rasterUrl: config.public.rasterUrl,
private: config.private,
Binary file added doc/flamegraph.png
45 changes: 45 additions & 0 deletions doc/performance-testing.md
@@ -0,0 +1,45 @@
# Performance testing

Shields has some basic tooling available to help you get started with
performance testing.

## Benchmarking the badge generation

Want to micro-benchmark a section of the code responsible for generating the
static badges? Follow these two simple steps:

1. Surround the code you want to time with `console.time` and `console.timeEnd`
statements. For example:

```
console.time('makeBadge')
const svg = makeBadge(badgeData)
console.timeEnd('makeBadge')
```

2. Run `npm run benchmark:badge` in your terminal. An average timing will
be displayed!
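
The averages are computed by _scripts/capture-timings.js_, which prints one
line per `console.time` label. With the default settings (10100 iterations,
100 of which are warmup), the output looks something like this (the timing
value here is illustrative):

```
Average 'makeBadge' time over 10000 iterations: 0.253ms
```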

If you want to change the number of iterations in the benchmark, modify the
values passed by the `benchmark:badge` script in _package.json_. If you want
to benchmark a specific code path not covered by the static badge, modify the
badge URL in _scripts/benchmark-performance.js_.
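
You can also invoke the two scripts directly with your own values. A sketch,
assuming a POSIX shell (the npm script uses `cross-env` to set
`NODE_CONFIG_ENV` in a cross-platform way):

```
NODE_CONFIG_ENV=test node scripts/benchmark-performance.js --iterations 1100 | node scripts/capture-timings.js --warmup-iterations 100
```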

## Profiling the full code

Want to have an overview of how the entire application is performing? Simply
run `npm run profile:server` in your terminal. This will start the backend
server (i.e. without the frontend) in profiling mode, and any requests you
make to `localhost:8080` will record profiling data in a file with a name
similar to _isolate-00000244AB6ED3B0-11920-v8.log_.
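
To collect a meaningful profile, the server needs some traffic. One quick way
to generate it, assuming `curl` is available, is to request a static badge in
a loop (the badge URL here is just an example):

```
for i in $(seq 1 1000); do
  curl -s "http://localhost:8080/badge/hello-world-green.svg" > /dev/null
done
```

Stop the server with `Ctrl+C` once you have collected enough samples.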

You can then make use of this profiling data in various tools, for example
[flamebearer](https://github.com/mapbox/flamebearer):

```
npm install -g flamebearer
node --prof-process --preprocess -j isolate-00000244AB6ED3B0-11920-v8.log | flamebearer
```

An example output is the following:
![](https://raw.github.com/badges/shields/master/doc/flamegraph.png)
2 changes: 2 additions & 0 deletions package.json
@@ -106,6 +106,8 @@
"start:server:e2e-on-build": "node server 8080",
"start:server": "cross-env NODE_CONFIG_ENV=development nodemon server 8080",
"debug:server": "cross-env NODE_CONFIG_ENV=development nodemon --inspect server.js 8080",
"profile:server": "cross-env NODE_CONFIG_ENV=development node --prof server 8080",
"benchmark:badge": "cross-env NODE_CONFIG_ENV=test node scripts/benchmark-performance.js --iterations 10100 | node scripts/capture-timings.js --warmup-iterations 100",
"prestart": "run-s --silent depcheck defs features",
"start": "concurrently --names server,frontend \"npm run start:server\" \"cross-env GATSBY_BASE_URL=http://localhost:8080 gatsby develop --port 3000\"",
"e2e": "start-server-and-test start http://localhost:3000 test:e2e",
26 changes: 26 additions & 0 deletions scripts/benchmark-performance.js
@@ -0,0 +1,26 @@
'use strict'

const config = require('config').util.toObject()
const got = require('got')
const minimist = require('minimist')
const Server = require('../core/server/server')

async function main() {
const server = new Server(config)
await server.start()
const args = minimist(process.argv)
const iterations = parseInt(args.iterations) || 10000
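// Vary the badge message with the loop counter so each request renders a fresh badge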
for (let i = 0; i < iterations; ++i) {
await got(`${server.baseUrl}badge/coverage-${i}-green.svg`)
}
await server.stop()
}

;(async () => {
try {
await main()
} catch (e) {
console.error(e)
process.exit(1)
}
})()
11 changes: 0 additions & 11 deletions scripts/benchmark-performance.sh

This file was deleted.

61 changes: 61 additions & 0 deletions scripts/capture-timings.js
@@ -0,0 +1,61 @@
'use strict'

const readline = require('readline')
const minimist = require('minimist')

async function captureTimings(warmupIterations) {
const rl = readline.createInterface({
input: process.stdin,
})

const times = {}
let timingsCount = 0
let labelsCount = 0
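// Matches the output of console.timeEnd, e.g. "makeBadge: 0.123ms"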
const timing = /^(.+): ([0-9.]+)ms$/i

for await (const line of rl) {
const match = timing.exec(line)
if (match) {
labelsCount = Object.keys(times).length
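// Skip the warmup phase (warmupIterations timings per label) before accumulating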
if (timingsCount > warmupIterations * labelsCount) {
const label = match[1]
const time = parseFloat(match[2])
times[label] = time + (times[label] || 0)
}
++timingsCount
}
}
return { times, iterations: timingsCount / labelsCount }
}

function logResults({ times, iterations, warmupIterations }) {
if (isNaN(iterations)) {
console.log(
`No timings captured. Have you included console.time statements in the badge creation code path?`
)
} else {
const timedIterations = iterations - warmupIterations
for (const [label, time] of Object.entries(times)) {
const averageTime = time / timedIterations
console.log(
`Average '${label}' time over ${timedIterations} iterations: ${averageTime}ms`
)
}
}
}

async function main() {
const args = minimist(process.argv)
const warmupIterations = parseInt(args['warmup-iterations']) || 100
const { times, iterations } = await captureTimings(warmupIterations)
logResults({ times, iterations, warmupIterations })
}

;(async () => {
try {
await main()
} catch (e) {
console.error(e)
process.exit(1)
}
})()
