Skip to content

Commit 183916d

Browse files
committed
Changes after review
1 parent 8dcf365 commit 183916d

File tree

4 files changed

+19
-51
lines changed

4 files changed

+19
-51
lines changed

.github/workflows/build-run-applications.yml

Lines changed: 5 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -171,6 +171,7 @@ jobs:
171171
TEST_RESULT_NAME: test_results_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}
172172
BENCHMARK_RESULT_NAME: benchmark_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}
173173
TEST_RESULT_FILE: test_results_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}.xml
174+
PYTEST_BENCHMARK_IGNORE: ${{ (contains(github.event.pull_request.labels.*.name, 'Run benchmark') || contains(inputs.WFType, 'Build + Tests + Benchmark') || github.ref_name == 'master') && format(' ') || format('--ignore=examples/display_lvgl_benchmark') }}
174175
runs-on: [self-hosted, Linux, bspwall]
175176
container:
176177
image: python:3.11-bookworm
@@ -185,24 +186,15 @@ jobs:
185186
env:
186187
PIP_EXTRA_INDEX_URL: "https://dl.espressif.com/pypi/"
187188
run: |
188-
echo "PYTEST_BENCHMARK_IGNORE=--ignore='examples/display_lvgl_benchmark'" >> $GITHUB_ENV
189189
pip install --prefer-binary cryptography pytest-embedded pytest-embedded-serial-esp pytest-embedded-idf pytest-custom_exit_code
190190
- name: Download latest results
191191
uses: actions/download-artifact@v4
192192
with:
193193
pattern: benchmark_*
194194
path: benchmark/
195-
- name: Set ignores
196-
if: contains(github.event.pull_request.labels.*.name, 'Run benchmark') || contains(inputs.WFType, 'Build + Tests + Benchmark') || github.ref_name == 'master'
197-
id: set_ignores
198-
run: |
199-
echo "PYTEST_BENCHMARK_IGNORE=" >> $GITHUB_ENV
200-
- name: Pull
201-
run: |
202-
git pull --rebase origin ${{ github.head_ref }} || echo "No remote changes to rebase"
203195
- name: Run apps
204196
run: |
205-
pytest --suppress-no-test-exit-code --ignore-glob '*/managed_components/*' --ignore=.github ${{ env.PYTEST_BENCHMARK_IGNORE }} --junit-xml=${{ env.TEST_RESULT_FILE }} --target=${{ matrix.runner.target }} -m ${{ matrix.runner.marker }} --build-dir=build_${{ matrix.runner.runs-on }}
197+
pytest --suppress-no-test-exit-code --ignore-glob '*/managed_components/*' --ignore=.github --junit-xml=${{ env.TEST_RESULT_FILE }} --target=${{ matrix.runner.target }} -m ${{ matrix.runner.marker }} --build-dir=build_${{ matrix.runner.runs-on }} ${{ env.PYTEST_BENCHMARK_IGNORE }}
206198
- name: Upload test results
207199
uses: actions/upload-artifact@v4
208200
if: always()
@@ -247,7 +239,8 @@ jobs:
247239
uses: EnricoMi/publish-unit-test-result-action@v2
248240
with:
249241
files: test_results/**/*.xml
250-
- name: Find test result files
242+
- name: Find benchmark result files
243+
if: (contains(github.event.pull_request.labels.*.name, 'Run benchmark') || contains(inputs.WFType, 'Build + Tests + Benchmark') || github.ref_name == 'master')
251244
id: find_files
252245
run: |
253246
OUTPUT_FILE="combined_benchmarks.md"
@@ -269,6 +262,7 @@ jobs:
269262
270263
echo "output_file=$OUTPUT_FILE" >> "$GITHUB_ENV"
271264
- name: Comment PR
265+
if: (contains(github.event.pull_request.labels.*.name, 'Run benchmark') || contains(inputs.WFType, 'Build + Tests + Benchmark') || github.ref_name == 'master')
272266
uses: thollander/actions-comment-pull-request@v3
273267
with:
274268
comment-tag: benchmark_results

examples/display_lvgl_benchmark/README.md

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,16 @@
11
# Display LVGL Benchmark
22

3-
This example shows LVGL internal benchmark demo.
3+
This example runs the LVGL benchmark demo to measure graphical performance on Espressif and M5Stack boards. It is used in CI for selected pull requests (based on labels) and after merging changes into the master branch.
4+
5+
## Main Features
6+
- Can be triggered by adding the "Run benchmark" label to a PR.
7+
- The measured values in a PR are compared against the master branch and posted as a comment, highlighting any differences.
8+
- Benchmark results for the master branch are stored in BSP releases.
49

510
## How to use the example
611

12+
This example can also be used as a standalone example.
13+
714
### Hardware Required
815

916
* ESP32-S3-LCD-EV-Board or ESP32-S3-LCD-EV-Board-2
Lines changed: 2 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,3 @@
1-
set(LV_DEMO_DIR "")
2-
set(LV_DEMOS_SOURCES "")
3-
if(CONFIG_LV_USE_DEMO_BENCHMARK)
4-
list(APPEND LV_DEMO_DIR ../managed_components/lvgl__lvgl/demos)
5-
file(GLOB_RECURSE LV_DEMOS_SOURCES ${LV_DEMO_DIR}/*.c)
6-
endif()
7-
81
idf_component_register(
9-
SRCS "main.c" ${LV_DEMOS_SOURCES}
10-
INCLUDE_DIRS "." ${LV_DEMO_DIR})
11-
12-
if(CONFIG_LV_USE_DEMO_BENCHMARK)
13-
set_source_files_properties(
14-
${LV_DEMOS_SOURCES}
15-
PROPERTIES COMPILE_OPTIONS
16-
-DLV_LVGL_H_INCLUDE_SIMPLE)
17-
endif()
2+
SRCS "main.c"
3+
INCLUDE_DIRS ".")

examples/display_lvgl_benchmark/pytest_display_lvgl_benchmark.py

Lines changed: 4 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
# SPDX-FileCopyrightText: 2023-2025 Espressif Systems (Shanghai) CO LTD
22
# SPDX-License-Identifier: CC0-1.0
33

4-
import os
54
import datetime
65
import json
76
from pathlib import Path
@@ -28,22 +27,6 @@ def read_json_file(board):
2827
return []
2928

3029

31-
def write_json_file(board, data):
32-
repo_root = Path(__file__).resolve().parent
33-
while repo_root.name != "esp-bsp" and repo_root.parent != repo_root:
34-
repo_root = repo_root.parent
35-
file_path = f"{repo_root}/bsp/{board}/benchmark.json"
36-
try:
37-
os.remove(file_path)
38-
except OSError:
39-
pass
40-
try:
41-
with open(file_path, "a") as file:
42-
file.write(data)
43-
except OSError:
44-
pass
45-
46-
4730
def find_test_results(json_obj, test):
4831
if json_obj:
4932
for t in json_obj["tests"]:
@@ -84,11 +67,10 @@ def test_example(dut: Dut, request) -> None:
8467
dut.expect_exact('app_main: Display LVGL demo')
8568
dut.expect_exact('main_task: Returned from app_main()')
8669

87-
try:
88-
os.remove("benchmark_" + board + ".md")
89-
os.remove("benchmark_" + board + ".json")
90-
except OSError:
91-
pass
70+
file_path = Path(f"benchmark_" + board + ".md")
71+
file_path.unlink(missing_ok=True)
72+
file_path = Path(f"benchmark_" + board + ".json")
73+
file_path.unlink(missing_ok=True)
9274

9375
output = {
9476
"date": date.strftime('%d.%m.%Y %H:%M'),
@@ -138,4 +120,3 @@ def test_example(dut: Dut, request) -> None:
138120
# Save JSON to file
139121
json_output = json.dumps(output, indent=4)
140122
write_to_file(board, ".json", json_output)
141-
write_json_file(board, json_output)

0 commit comments

Comments
 (0)