CI checks for pull_request by dependabot[bot] #790

Workflow file for this run

# Summary: OpenFermion continuous integration status checks.
#
# This workflow runs various tests to verify that changes to the OpenFermion
# codebase pass validation and conform to project format and style standards.
# It triggers on certain events such as pull requests and merge-queue merges,
# and can also be invoked manually via the "Run workflow" button at
# https://github.com/quantumlib/OpenFermion/actions/workflows/ci.yaml
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
name: Continuous integration checks
run-name: CI checks for ${{github.event_name}} by ${{github.actor}}
on:
pull_request:
types: [opened, synchronize]
branches:
- master
merge_group:
types:
- checks_requested
# Allow manual invocation.
workflow_dispatch:
inputs:
sha:
description: 'SHA of commit to run against:'
type: string
required: true
python_ver:
description: Normal version of Python to use
type: string
python_compat_ver:
description: Max compat version of Python
type: string
concurrency:
# Cancel any previously-started but still active runs on the same branch.
cancel-in-progress: true
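# Group runs by workflow and PR number (or by ref, for non-PR events), so
# that only runs for the same pull request or branch cancel each other.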
group: ${{github.workflow}}-${{github.event.pull_request.number||github.ref}}
env:
# Default Python version to use.
python_ver: "3.12"
# Oldest Python version to use, for max_compat tests.
python_compat_ver: "3.10"
# Files listing dependencies we install using pip in the various jobs below.
# This is used by setup-python to check whether its cache needs updating.
python_dep_files: >-
dev_tools/requirements/envs/format.env.txt
dev_tools/requirements/envs/mypy.env.txt
dev_tools/requirements/envs/pylint.env.txt
dev_tools/requirements/envs/pytest-extra.env.txt
dev_tools/requirements/envs/pytest.env.txt
dev_tools/requirements/max_compat/pytest-max-compat.env.txt
jobs:
# GitHub Actions can have path filters (i.e., the use of a "paths:" keyword
# on the trigger definitions in the "on:" block earlier in this file). Path
# filters *would* be the natural way to make workflows trigger only when the
# desired files are affected by a pull request – except that the way branch
# protection rules work today is: "If a workflow is skipped due to path
# filtering [...] then checks associated with that workflow will remain in a
# Pending state. A pull request that requires those checks to be successful
# will be blocked from merging." Surprisingly, GitHub doesn't provide
# guidance on how to handle this. Discussions about it sometimes suggest
# hacky workarounds (cf. https://stackoverflow.com/a/78003720/743730).
# The approach taken here is to forgo the use of path filtering rules in the
# trigger condition, and instead, do our own filtering using a combination
# of testing specific file patterns (in the changes job below) and "if:"
# conditions on individual jobs in the rest of this workflow.
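#
# As an illustrative sketch only (not a job defined in this file), a
# downstream job consumes the outputs of the "changes" job like this:
#
#   example-python-job:
#     needs: changes
#     if: needs.changes.outputs.python == 'true'
#     runs-on: ubuntu-latest
#     steps:
#       - run: echo "runs only when Python files were added or modified"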
changes:
name: (Find changed files)
runs-on: ubuntu-24.04
timeout-minutes: 5
outputs:
python: ${{steps.filter.outputs.python}}
python_files: ${{steps.filter.outputs.python_files}}
yaml: ${{steps.filter.outputs.yaml}}
yaml_files: ${{steps.filter.outputs.yaml_files}}
steps:
# When invoked manually, use the given SHA to figure out the change list.
- if: github.event_name == 'workflow_dispatch'
name: Use the user-provided SHA as the basis for comparison
env:
GH_TOKEN: ${{github.token}}
run: |
set -x +e
url="repos/${{github.repository}}/commits/${{inputs.sha}}"
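# Resolve the user-supplied (possibly abbreviated) SHA to a full commit SHA.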
full_sha="$(gh api $url -q '.sha')"
if (( $? == 0 )); then
echo "base=$full_sha" >> "$GITHUB_ENV"
else
{
echo "### :x: Workflow error"
echo "The SHA provided to _Run Workflow_ does not exist:"
echo "<code>${{inputs.sha}}</code>"
} >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
- if: github.event_name != 'workflow_dispatch'
name: Use ref ${{github.ref_name}} as the basis for comparison
run: |
echo base=${{github.ref_name}} >> "$GITHUB_ENV"
- name: Check out a copy of the OpenFermion git repository
uses: actions/checkout@v4
- name: Determine files changed by this ${{github.event_name}} event
uses: dorny/paths-filter@v3
id: filter
with:
base: ${{env.base}}
list-files: 'shell'
# The outputs will be variables named "foo_files" for a filter "foo".
filters: |
python:
- added|modified:
- '**/*.py'
yaml:
- added|modified:
- '**/*.yaml'
- '**/*.yml'
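# The setup job installs every requirements file once, so that the pip cache
# created by actions/setup-python is warmed for the jobs below, which use the
# same cache configuration and restore it instead of resolving from scratch.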
setup:
if: needs.changes.outputs.python == 'true'
name: (Set up Python)
needs: changes
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
- name: Set up Python with caching of pip dependencies
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: "x64"
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install Python requirements
run: |
set -x
for file in ${{env.python_dep_files}}; do
pip install -r $file
done
set +x
echo "::group::List of installed pip packages and their versions"
pip list
echo "::endgroup::"
python-format:
if: needs.changes.outputs.python == 'true'
name: Python format checks
needs: [changes, setup]
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
with:
fetch-depth: 0
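# Full git history is fetched (fetch-depth: 0) so the incremental format
# check can diff against the base revision.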
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: "x64"
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/format.env.txt
- name: Run format checks
run: check/format-incremental
python-mypy:
if: needs.changes.outputs.python == 'true'
name: Python type checks
needs: [changes, setup]
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: "x64"
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/mypy.env.txt
- name: Type check
run: check/mypy
python-lint:
if: needs.changes.outputs.python == 'true'
name: Python lint checks
needs: [changes, setup]
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
architecture: "x64"
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/pylint.env.txt
- name: Run pylint
run: check/pylint
# Each of the next sets of matrix tests consists of 2 job definitions. The
# job named "Thing-matrix" defines a matrix of runs for different platforms.
# It's set with "fail-fast: false" so that a failure in one of the matrix
# jobs doesn't cause this entire CI workflow to abort. Then, the job named
# "Thing" is the one that actually reports the results, and is the one used
# in the list of required status checks in the repository branch protection
# rules. It needs to be an independent job because it has to test the results
# of all the matrix runs.
pytest-matrix:
if: needs.changes.outputs.python == 'true'
name: (Python pytest matrix)
needs: [changes, setup]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [ ubuntu-latest, macos-latest, windows-latest ]
cirq-version: [ 1.4.1 ]
fail-fast: false
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: |
pip install -r dev_tools/requirements/envs/pytest.env.txt
pip install cirq-core==${{matrix.cirq-version}}
- name: Run pytest
run: check/pytest
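# Status-check job for the matrix above. The "success() || failure()" term
# makes it run whether the matrix jobs passed or failed (but not when the
# workflow was cancelled), so it can report a single overall result.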
pytest:
if: needs.changes.outputs.python == 'true' && (success() || failure())
name: Python pytest checks
needs: [changes, pytest-matrix]
runs-on: ubuntu-latest
steps:
- run: |
result="${{needs.pytest-matrix.result}}"
if [[ $result == "success" || $result == "skipped" ]]; then
exit 0
else
exit 1
fi
pytest-extra-matrix:
if: needs.changes.outputs.python == 'true'
name: (Python extra pytest matrix)
needs: [changes, setup]
runs-on: ${{ matrix.os }}
timeout-minutes: 15
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
cirq-version: [ 1.4.1 ]
fail-fast: false
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: |
pip install -r dev_tools/requirements/envs/pytest-extra.env.txt
pip install cirq-core==${{matrix.cirq-version}}
- name: Run pytest
run: check/pytest -m "not slow" src/openfermion/resource_estimates
pytest-extra:
if: needs.changes.outputs.python == 'true' && (success() || failure())
name: Python extra pytest checks
needs: [changes, pytest-extra-matrix]
runs-on: ubuntu-latest
steps:
- run: |
result="${{needs.pytest-extra-matrix.result}}"
if [[ $result == "success" || $result == "skipped" ]]; then
exit 0
else
exit 1
fi
python-compat:
if: needs.changes.outputs.python == 'true'
name: Python compatibility checks
needs: [changes, setup]
runs-on: ubuntu-20.04
timeout-minutes: 15
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
# Note: deliberately not using our Python cache here b/c this runs
# a different version of Python.
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{env.python_compat_ver}}
- name: Install requirements
run: |
pip install -r dev_tools/requirements/max_compat/pytest-max-compat.env.txt
- name: Run pytest
run: check/pytest
coverage:
if: needs.changes.outputs.python == 'true'
name: Python code coverage checks
needs: [changes, setup]
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
with:
fetch-depth: 0
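# Full git history is fetched so the incremental coverage check can compare
# against the base revision.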
- name: Set up Python and restore cache
uses: actions/setup-python@v5
with:
python-version: ${{inputs.python_ver || env.python_ver}}
cache: pip
cache-dependency-path: ${{env.python_dep_files}}
- name: Install requirements
run: pip install -r dev_tools/requirements/envs/pytest.env.txt
- name: Run code coverage tests
run: check/pytest-and-incremental-coverage
yaml-lint:
if: needs.changes.outputs.yaml == 'true'
name: YAML lint checks
needs: changes
runs-on: ubuntu-24.04
timeout-minutes: 5
env:
changed_files: ${{needs.changes.outputs.yaml_files}}
steps:
- name: Check out a copy of the git repository
uses: actions/checkout@v4
- name: Set up yamllint output problem matcher
run: |
ls -la
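# Register a problem matcher so that yamllint findings are surfaced as
# annotations in the GitHub UI.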
echo "::add-matcher::.github/problem-matchers/yamllint.json"
- name: Run yamllint
run: |
set -x
# shellcheck disable=SC2086
yamllint $changed_files