name: ASV Benchmarks
on:
  push:
    branches:
      - main
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
permissions:
  contents: read  # to fetch code (actions/checkout)
env:
  # GITHUB_TOKEN: ${{ secrets.OB_BENCH_TOKEN }}
  # BENCHMARKS_REPO: ev-br/ob-bench-asv
  ASV_CONFIG: asv.conf.json
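# ASV_CONFIG is referenced as $ASV_CONFIG by the asv invocations in both jobs below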
jobs:
  bench:
    strategy:
      fail-fast: false
      matrix:
        include:
          # define matrix.name to identify the GitHub Actions machine, as the hostname changes every time
          - image: "cirun-aws-runner-graviton--${{ github.run_id }}"
            name: "gh-graviton"
          - image: "cirun-aws-runner-cascade-lake--${{ github.run_id }}"
            name: "gh-skylake"
    runs-on: ${{ matrix.image }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # fetch all commits so the benchmarks HTML can be generated
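      # log the CPU details so every benchmark run records the hardware it executed on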
      - name: Print system information
        run: |
          if [ "$RUNNER_OS" == "Linux" ]; then
            cat /proc/cpuinfo
          else
            echo "::error::$RUNNER_OS not supported"
            exit 1
          fi
      - name: Install system dependencies
        run: |
          if [ "$RUNNER_OS" == "Linux" ]; then
            sudo apt-get update
            sudo apt-get install -y gfortran cmake ccache python3-pip pkg-config
          else
            echo "::error::$RUNNER_OS not supported"
            exit 1
          fi
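      # asv drives the benchmarks; meson, meson-python and ninja provide the build tooling,
      # and the scipy-openblas32 nightly wheel supplies the OpenBLAS build being benchmarked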
      - name: Install python dependencies
        run: |
          # --break-system-packages is required on Ubuntu Noble
          pip3 install "numpy<2" meson meson-python ninja build asv virtualenv --break-system-packages
          # install the nightly OpenBLAS wheel
          pip3 install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy-openblas32 --break-system-packages
          python3 -c 'import scipy_openblas32 as so; print(so.get_pkg_config())' > scipy_openblas.pc
          # export PKG_CONFIG_PATH=$PWD
      - name: Print OpenBLAS information
        run: |
          echo "scipy_openblas.pc contents: "
          cat scipy_openblas.pc
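      # asv identifies results by machine name; the auto-detected entry is discarded and replaced
      # with the stable matrix.name, since the runner hostname changes on every run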
      - name: Set and log asv machine configuration
        run: |
          python3 -m asv machine --yes --config $ASV_CONFIG
          echo "Machine Configuration:"
          cat ~/.asv-machine.json
          rm ~/.asv-machine.json
          # set the machine name depending on the OS/arch image
          echo "Setting machine name to ${{ matrix.name }}"
          python3 -m asv machine --machine ${{ matrix.name }} --yes --config $ASV_CONFIG -v
          cat ~/.asv-machine.json
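      # PKG_CONFIG_PATH points at the workspace so builds run by asv can locate the
      # scipy_openblas.pc generated above via pkg-config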
      - name: Run benchmarks
        run: |
          python3 -m asv run --config $ASV_CONFIG -v
          ls -l .asv/results
          echo ">>> results/machine"
          ls -l .asv/results/${{ matrix.name }}
        env:
          PKG_CONFIG_PATH: ${{ github.workspace }}
      - name: Store/Upload benchmark results
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.name }}
          path: .asv/results/*
          if-no-files-found: error
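  # each matrix job uploads its results under its machine name; the job below merges them
  # into a single .asv/results tree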
  combine-and-publish:
    runs-on: ubuntu-latest
    needs: bench
    steps:
      - name: Check out the repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # fetch all commits so the benchmarks HTML can be generated
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Set up ASV
        run: |
          pip install asv
          asv machine --yes --config $ASV_CONFIG
      - name: Download all artifacts from benchmarking runs
        uses: actions/download-artifact@v4
      - name: Combine the runs
        run: |
          mkdir -p .asv/results
          cp -r gh-skylake .asv/results
          cp -r gh-graviton .asv/results
          ls -l
          ls -l .asv/results
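          # the listings above make it easy to verify in the job log that both machines' results were copied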