---
name: Deploy pull request preview
# Set up and build files with Hugo
# https://github.com/peaceiris/actions-hugo
# Deploy to GitHub Pages
# https://github.com/peaceiris/actions-gh-pages
# Triggered by the "Build pull request preview" workflow
# https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
on:
  # https://docs.github.com/en/actions/reference/events-that-trigger-workflows#workflow_run
  workflow_run:
    workflows: ["Build pull request preview"]
    types: [completed]

permissions:
  contents: write
  pull-requests: write
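
# The jobs below push to the pr_previews branch (contents: write) and
# comment on the pull request (pull-requests: write)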
jobs:
  deploy_pr_preview:
    # Only run if PR preview build was successful
    if: >
      github.event.workflow_run.event == 'pull_request' &&
      github.event.workflow_run.conclusion == 'success'
    name: Deploy pull request preview
    runs-on: ubuntu-latest
    # Expose the PR number extracted below so the link-check jobs can use it
    outputs:
      pr_number: ${{ steps.extract.outputs.pr_number }}
    steps:
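      # Fetch the "pr" artifact produced by the build workflow via the API;
      # it is expected to contain the rendered preview and a PR number file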
- name: "Download artifact"
uses: actions/github-script@v8
with:
script: |
var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{github.event.workflow_run.id }},
});
var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
return artifact.name == "pr"
})[0];
var download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip',
});
var fs = require('fs');
fs.writeFileSync('${{github.workspace}}/pr.zip',
Buffer.from(download.data));
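      # The artifact includes a file named "PR" holding the PR number;
      # expose it as a step output for the steps and jobs below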
      - name: Extract pull request preview
        id: extract
        run: |
          unzip pr.zip
          PR_NUMBER="$(cat ./PR)"
          echo "pr_number=$PR_NUMBER" >> "$GITHUB_OUTPUT"
      - name: Deploy Pull Request preview
        uses: peaceiris/actions-gh-pages@v4 # cspell:disable-line
        with:
          # Use GITHUB_TOKEN to write to the local repository
          github_token: ${{ secrets.GITHUB_TOKEN }}
          # Purge older files from a given PR
          keep_files: false
          # Branch to push to
          publish_branch: pr_previews
          # Source directory
          publish_dir: ./${{ steps.extract.outputs.pr_number }}/
          # Destination directory
          destination_dir: ${{ steps.extract.outputs.pr_number }}
          # Reuse the PR head commit message
          commit_message: ${{ github.event.workflow_run.head_commit.message }}
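      # Comment on the PR with a link to the deployed preview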
      - name: Update Pull Request
        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            var issue_number = Number('${{ steps.extract.outputs.pr_number }}');
            const output = `#### Documentation preview deployed!
            Available at https://docs.egi.eu/documentation/${issue_number}`;
            github.rest.issues.createComment({
              issue_number: issue_number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: output
            })
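
  # The check_preview_links_* jobs below crawl the deployed preview with
  # hakrawler and check the discovered links with lychee, one job per
  # documentation section (users, internal, providers)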
  check_preview_links_users:
    env:
      BASE_URL:
        https://docs.egi.eu/documentation/${{ needs.deploy_pr_preview.outputs.pr_number }}/users
    runs-on: ubuntu-latest
    needs:
      - deploy_pr_preview
    if: >
      github.event.workflow_run.event == 'pull_request' &&
      github.event.workflow_run.conclusion == 'success'
    strategy:
      matrix:
        # This matrix is built from the directories in content/en/users
        url:
          - /
          - "/aai"
          - "/compute"
          - "/data"
          - "/dev-env"
          - "/getting-started"
          # - "/machine-learning" # currently in draft
          - "/security"
          - "/training"
          - "/tutorials"
    name: Check links of preview
    steps:
      - name: Checkout configuration
        uses: actions/checkout@v6
      - name: Get tools
        # We run hakrawler to crawl a page and its sub-pages,
        # then use lychee to check whether the URLs in those pages are OK.
        run: |
          echo "${{ env.BASE_URL }}"
          curl -fSL https://github.com/lycheeverse/lychee/releases/download/lychee-v0.19.1/lychee-x86_64-unknown-linux-musl.tar.gz | tar xzf -
          GOBIN=${PWD} go install github.com/hakluke/hakrawler@latest
      - name: Extract links and check them
        # This passes a given endpoint to hakrawler, which gets all of the
        # links in it up to a depth of 10 (-d), without crawling back up the
        # tree (-i), returning only unique (-u) URLs.
        # The URLs are then parsed via jq to keep only hrefs, no scripts,
        # and the resulting list is sent to lychee via stdin (-)
        # to see if there are any bad links in those pages.
        run: >-
          echo "${{ env.BASE_URL }}${{ matrix.url }}/" |
          ./hakrawler -u -i -d 10 -json |
          jq -r '. |
          select(.Source == "href") | .URL' |
          ./lychee -
  check_preview_links_internal:
    runs-on: ubuntu-latest
    needs:
      - deploy_pr_preview
    if: >
      github.event.workflow_run.event == 'pull_request' &&
      github.event.workflow_run.conclusion == 'success'
    strategy:
      matrix:
        # This matrix is built from the directories in content/en/internal
        url:
          - "accounting"
          - "collaboration-tools"
          - "configuration-database"
          - "getting-started"
          - "guidelines-software-development"
          - "helpdesk" # cspell:disable-line
          - "messaging"
          - "monitoring"
          - "operations-portal"
          - "security-coordination"
          # cspell:disable
    name: Check links of preview
    steps:
      - name: Checkout configuration
        uses: actions/checkout@v6
      - name: Get tools
        # We run hakrawler to crawl a page and its sub-pages,
        # then use lychee to check whether the URLs in those pages are OK.
        run: |
          curl -fSL https://github.com/lycheeverse/lychee/releases/download/lychee-v0.19.1/lychee-x86_64-unknown-linux-musl.tar.gz | tar xzf -
          GOBIN=${PWD} go install github.com/hakluke/hakrawler@latest
      - name: Extract links and check them
        # This passes a given endpoint to hakrawler, which gets all of the
        # links in it up to a depth of 10 (-d), without crawling back up the
        # tree (-i), returning only unique (-u) URLs.
        # The URLs are then parsed via jq to keep only hrefs, no scripts,
        # and the resulting list is sent to lychee via stdin (-)
        # to see if there are any bad links in those pages.
        # github.event has no PR number for workflow_run triggers, so reuse
        # the number exposed by the deploy job.
        run: >-
          echo "https://docs.egi.eu/documentation/${{
          needs.deploy_pr_preview.outputs.pr_number }}/internal/${{ matrix.url }}/" |
          ./hakrawler -u -i -d 10 -json |
          jq -r '. |
          select(.Source == "href") | .URL' |
          ./lychee -
  check_preview_links_providers:
    runs-on: ubuntu-latest
    needs:
      - deploy_pr_preview
    if: >
      github.event.workflow_run.event == 'pull_request' &&
      github.event.workflow_run.conclusion == 'success'
    strategy:
      matrix:
        # This matrix is built from the directories in content/en/providers
        url:
          - "check-in"
          - "cloud-compute"
          - "datahub" # cspell:disable-line
          - "high-throughput-compute"
          - "joining"
          - "notebooks"
          - "online-storage"
          - "operations-manuals"
          - "rod"
          # cspell:disable
    name: Check links of preview
    steps:
      - name: Checkout configuration
        uses: actions/checkout@v6
      - name: Get tools
        # We run hakrawler to crawl a page and its sub-pages,
        # then use lychee to check whether the URLs in those pages are OK.
        run: |
          curl -fSL https://github.com/lycheeverse/lychee/releases/download/lychee-v0.19.1/lychee-x86_64-unknown-linux-musl.tar.gz | tar xzf -
          GOBIN=${PWD} go install github.com/hakluke/hakrawler@latest
      - name: Extract links and check them
        # This passes a given endpoint to hakrawler, which gets all of the
        # links in it up to a depth of 10 (-d), without crawling back up the
        # tree (-i), returning only unique (-u) URLs.
        # The URLs are then parsed via jq to keep only hrefs, no scripts,
        # and the resulting list is sent to lychee via stdin (-)
        # to see if there are any bad links in those pages.
        # github.event has no PR number for workflow_run triggers, so reuse
        # the number exposed by the deploy job.
        run: >-
          echo "https://docs.egi.eu/documentation/${{
          needs.deploy_pr_preview.outputs.pr_number }}/providers/${{ matrix.url }}/" |
          ./hakrawler -u -i -d 10 -json |
          jq -r '. |
          select(.Source == "href") | .URL' |
          ./lychee -
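
  # Also check the about and support pages of the preview; the root URL is
  # deliberately excluded so the crawler does not walk the whole site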
  lychee-otherdocs: # cspell:disable-line
    needs:
      - deploy_pr_preview
    name: Check links at base of the about pages
    strategy:
      matrix:
        url: # do not include the root url, or we will crawl the whole site
          - about
          - support
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
      - name: Get tools
        run: |
          curl -fSL https://github.com/lycheeverse/lychee/releases/download/lychee-v0.19.1/lychee-x86_64-unknown-linux-musl.tar.gz | tar xzf -
          GOBIN=${PWD} go install github.com/hakluke/hakrawler@latest
      - name: Check links
        # github.event has no PR number for workflow_run triggers, so reuse
        # the number exposed by the deploy job.
        run: >-
          echo "https://docs.egi.eu/documentation/${{
          needs.deploy_pr_preview.outputs.pr_number }}/${{ matrix.url }}/" |
          ./hakrawler -u -i -d 10 -json |
          jq -r '. |
          select(.Source == "href") | .URL' |
          ./lychee -