# luau/.github/workflows/benchmark-cachegrind.yml
---
# Runs the Luau benchmark suite under valgrind/cachegrind on pull requests
# and publishes the results to an external gh-pages benchmark-results repo.
name: Luau Benchmarks (CacheGrind)

on:
  pull_request:
    paths-ignore:
      - "docs/**"
      - "papers/**"
      - "rfcs/**"
      - "*.md"
      - "prototyping/**"

jobs:
  benchmarks-run:
    name: Run ${{ matrix.bench.title }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        engine:
          - { channel: stable, version: latest }
        bench:
          - {
              script: "run-benchmarks",
              # NOTE(review): `timeout` is never consumed by a
              # `timeout-minutes` key below — confirm whether it is read by
              # the benchmark action or is dead configuration.
              timeout: 12,
              title: "Luau Benchmarks",
              cachegrindTitle: "Performance",
              cachegrindIterCount: 20,
            }
    steps:
      - uses: actions/checkout@v3
        with:
          token: "${{ secrets.BENCH_GITHUB_TOKEN }}"

      - name: Build Luau
        run: make config=release luau luau-analyze

      - uses: actions/setup-python@v3
        with:
          python-version: "3.9"
          architecture: "x64"

      - name: Install python dependencies
        run: |
          python -m pip install requests
          python -m pip install --user numpy scipy matplotlib ipython jupyter pandas sympy nose

      - name: Install valgrind
        # `-y` keeps apt-get non-interactive so the step cannot stall or
        # abort waiting for confirmation on a CI runner; `update` first so
        # the package index is fresh on the ephemeral runner image.
        run: |
          sudo apt-get update
          sudo apt-get install -y valgrind

      - name: Run ${{ matrix.bench.title }} (Cold Cachegrind)
        # Single iteration to measure cold-cache behavior; output is
        # appended to the shared results file consumed by the store step.
        run: sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "${{ matrix.bench.cachegrindTitle }}Cold" 1 | tee -a ${{ matrix.bench.script }}-output.txt

      - name: Run ${{ matrix.bench.title }} (Warm Cachegrind)
        run: sudo bash ./scripts/run-with-cachegrind.sh python ./bench/bench.py "${{ matrix.bench.cachegrindTitle }}" ${{ matrix.bench.cachegrindIterCount }} | tee -a ${{ matrix.bench.script }}-output.txt

      - name: Navigate to root dir then create gh-pages directory
        run: mkdir gh-pages

      - name: Switch to & checkout benchmark results (gh-pages) repo
        uses: actions/checkout@v3
        with:
          repository: AllanJeremy/luau-benchmark-results
          ref: "main"
          token: "${{ secrets.BENCH_GITHUB_TOKEN }}"
          path: "./gh-pages"

      - name: Checkout benchmark results (gh-pages)
        # Each `run` step starts a fresh shell, so a separate `cd gh-pages`
        # step would not persist; `working-directory` ensures these git
        # commands operate on the benchmark-results clone rather than the
        # Luau checkout (which has no `main` branch to fetch).
        working-directory: gh-pages
        run: |
          git status
          git fetch --no-tags --depth=1 origin main
          git checkout main
          git pull
          git status

      # Run `github-action-benchmark` action
      - name: Store ${{ matrix.bench.title }} result
        uses: Roblox/rhysd-github-action-benchmark@v-luau
        with:
          name: ${{ matrix.bench.title }}
          tool: "benchmarkluau"
          # This should come from the Luau repo.
          # NOTE(review): `../` assumes the action resolves paths one level
          # below the workspace root, while `tee` above wrote the file into
          # the workspace root — verify against the action's docs.
          output-file-path: ../${{ matrix.bench.script }}-output.txt
          # This should map to the benchmark results repo.
          external-data-json-path: ../gh-pages/dev/bench/data.json
          alert-threshold: 150%
          fail-threshold: 200%
          fail-on-alert: true
          comment-on-alert: true
          comment-always: true
          github-token: ${{ secrets.BENCH_GITHUB_TOKEN }}