ci(report): Add runtime tests report #10764

Open · wants to merge 1 commit into master
10 changes: 6 additions & 4 deletions .github/scripts/tests_run.sh
@@ -11,9 +11,11 @@ function run_test {
local error=0
local sdkconfig_path
local extra_args
local test_type

sketchdir=$(dirname "$sketch")
sketchname=$(basename "$sketchdir")
test_type=$(basename "$(dirname "$sketchdir")")

if [ "$options" -eq 0 ] && [ -f "$sketchdir"/ci.json ]; then
len=$(jq -r --arg target "$target" '.fqbn[$target] | length' "$sketchdir"/ci.json)
@@ -113,14 +115,14 @@ function run_test {
rm "$sketchdir"/diagram.json 2>/dev/null || true

result=0
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then
result=0
printf "\033[95mRetrying test: %s -- Config: %s\033[0m\n" "$sketchname" "$i"
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\033[95mpytest \"%s/test_%s.py\" --build-dir \"%s\" --junit-xml=\"%s\" -o junit_suite_name=%s_%s_%s_%s%s %s\033[0m\n" "$sketchdir" "$sketchname" "$build_dir" "$report_file" "$test_type" "$platform" "$target" "$sketchname" "$i" "${extra_args[*]@Q}"
bash -c "set +e; pytest \"$sketchdir/test_$sketchname.py\" --build-dir \"$build_dir\" --junit-xml=\"$report_file\" -o junit_suite_name=${test_type}_${platform}_${target}_${sketchname}${i} ${extra_args[*]@Q}; exit \$?" || result=$?
printf "\n"
if [ $result -ne 0 ]; then
printf "\033[91mFailed test: %s -- Config: %s\033[0m\n\n" "$sketchname" "$i"
37 changes: 33 additions & 4 deletions .github/workflows/tests_results.yml
@@ -41,6 +41,7 @@ jobs:
original_sha=$(cat ./artifacts/parent-artifacts/sha.txt)
original_ref=$(cat ./artifacts/parent-artifacts/ref.txt)
original_conclusion=$(cat ./artifacts/parent-artifacts/conclusion.txt)
original_run_id=$(cat ./artifacts/parent-artifacts/run_id.txt)

# Sanitize the values to avoid security issues

@@ -59,17 +60,27 @@
# Conclusion: Allow alphabetical characters and underscores
original_conclusion=$(echo "$original_conclusion" | tr -cd '[:alpha:]_')

# Run ID: Allow numeric characters
original_run_id=$(echo "$original_run_id" | tr -cd '[:digit:]')

echo "original_event=$original_event" >> $GITHUB_ENV
echo "original_action=$original_action" >> $GITHUB_ENV
echo "original_sha=$original_sha" >> $GITHUB_ENV
echo "original_ref=$original_ref" >> $GITHUB_ENV
echo "original_conclusion=$original_conclusion" >> $GITHUB_ENV
echo "original_run_id=$original_run_id" >> $GITHUB_ENV

echo "original_event = $original_event"
echo "original_action = $original_action"
echo "original_sha = $original_sha"
echo "original_ref = $original_ref"
echo "original_conclusion = $original_conclusion"
echo "original_run_id = $original_run_id"

- name: Print links to other runs
run: |
echo "Build, Hardware and QEMU tests: https://github.com/${{ github.repository }}/actions/runs/${{ env.original_run_id }}"
echo "Wokwi tests: https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}"

- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v2
@@ -80,6 +91,17 @@
files: ./artifacts/**/*.xml
action_fail: true
compare_to_earlier_commit: false
json_file: ./unity_results.json
json_suite_details: true

- name: Upload JSON
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: unity_results
overwrite: true
path: |
./unity_results.json

- name: Fail if tests failed
if: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' }}
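The new `json_file` and `json_suite_details` options make the publish action write a machine-readable summary to `./unity_results.json`, which the report generation step further below consumes. The JSON schema is defined by the action and is not part of this diff, so the following sketch (an assumption) only inspects the top-level structure rather than relying on specific field names:

```python
# Sketch (assumption): peek at unity_results.json without presuming its schema,
# since the file's structure comes from the publish action, not this PR.
import json


def describe(path: str = "unity_results.json") -> None:
    """Print each top-level key and the type of its value."""
    with open(path, encoding="utf-8") as handle:
        results = json.load(handle)
    for key, value in results.items():
        print(f"{key}: {type(value).__name__}")


if __name__ == "__main__":
    describe()
```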
@@ -138,11 +160,18 @@ jobs:
})).data;
core.info(`${name} is ${state}`);

- name: Create output folder
- name: Generate report
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
env:
REPORT_FILE: ./runtime-tests-results/RUNTIME_TESTS_REPORT.md
WOKWI_RUN_ID: ${{ github.event.workflow_run.id }}
BUILD_RUN_ID: ${{ env.original_run_id }}
IS_FAILING: ${{ env.original_conclusion == 'failure' || env.original_conclusion == 'timed_out' || github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out' || job.status == 'failure' }}
run: |
rm -rf artifacts
mkdir -p runtime-tests-results
rm -rf artifacts $REPORT_FILE
mv -f ./unity_results.json ./runtime-tests-results/unity_results.json
touch $REPORT_FILE
python3 ./runtime-tests-results/table_generator.py ./runtime-tests-results/unity_results.json >> $REPORT_FILE

- name: Generate badge
if: ${{ !cancelled() && (env.original_event == 'schedule' || env.original_event == 'workflow_dispatch') }} # codespell:ignore cancelled
@@ -161,6 +190,6 @@ jobs:
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
if [[ `git status --porcelain` ]]; then
git add --all
git commit -m "Updated runtime tests badge"
git commit -m "Updated runtime tests report"
git push origin HEAD:gh-pages
fi
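The `Generate report` step feeds `unity_results.json` to `./runtime-tests-results/table_generator.py`, which is not included in this diff. Purely to illustrate the intended flow, here is a hedged sketch of what such a generator might look like; the JSON field names used here ("suites", "name", "tests", "failures", "errors", "skipped") are assumptions, not the script's actual interface:

```python
# Hedged sketch of a Markdown table generator for the runtime tests report.
# NOT the repository's actual table_generator.py; field names are assumptions.
import json
import sys


def main(json_path: str) -> None:
    with open(json_path, encoding="utf-8") as handle:
        data = json.load(handle)

    print("| Suite | Tests | Failures | Errors | Skipped |")
    print("|-------|-------|----------|--------|---------|")
    for suite in data.get("suites", []):
        print(
            "| {name} | {tests} | {failures} | {errors} | {skipped} |".format(
                name=suite.get("name", "unknown"),
                tests=suite.get("tests", 0),
                failures=suite.get("failures", 0),
                errors=suite.get("errors", 0),
                skipped=suite.get("skipped", 0),
            )
        )


if __name__ == "__main__":
    main(sys.argv[1] if len(sys.argv) > 1 else "unity_results.json")
```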
4 changes: 4 additions & 0 deletions .github/workflows/tests_wokwi.yml
@@ -109,6 +109,10 @@ jobs:
printf "\nAction = "
cat artifacts/action.txt

printf "${{ github.event.workflow_run.id }}" >> artifacts/run_id.txt
printf "\nRun ID = "
cat artifacts/run_id.txt

if [ -z "$ref" ] || [ "$ref" == "null" ]; then
echo "Failed to get PR number or ref"
exit 1
2 changes: 1 addition & 1 deletion README.md
@@ -3,7 +3,7 @@
[![Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=push&label=Compilation%20Tests)](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Apush)
[![Verbose Build Status](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/push.yml?branch=master&event=schedule&label=Compilation%20Tests%20(Verbose))](https://github.com/espressif/arduino-esp32/actions/workflows/push.yml?query=branch%3Amaster+event%3Aschedule)
[![External Libraries Test](https://img.shields.io/github/actions/workflow/status/espressif/arduino-esp32/lib.yml?branch=master&event=schedule&label=External%20Libraries%20Test)](https://github.com/espressif/arduino-esp32/blob/gh-pages/LIBRARIES_TEST.md)
[![Runtime Tests](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/badge.svg)](https://github.com/espressif/arduino-esp32/actions/workflows/tests_results.yml)
[![Runtime Tests](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/badge.svg)](https://github.com/espressif/arduino-esp32/blob/gh-pages/runtime-tests-results/RUNTIME_TESTS_REPORT.md)

### Need help or have a question? Join the chat at [Gitter](https://gitter.im/espressif/arduino-esp32) or [open a new Discussion](https://github.com/espressif/arduino-esp32/discussions)
