Skip to content

Commit 8dcf365

Browse files
committed
ci(runner): Add benchmark example and print benchmark results
1 parent 531ad57 commit 8dcf365

19 files changed

+821
-6
lines changed

.github/workflows/build-run-applications.yml

Lines changed: 87 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,25 @@ name: Build ESP-BSP apps
55

66
on:
77
pull_request:
8-
types: [opened, reopened, synchronize]
8+
types: [opened, reopened, synchronize, labeled]
9+
push:
10+
branches:
11+
- master
12+
workflow_dispatch:
13+
inputs:
14+
WFType:
15+
description: 'Workflow type'
16+
required: true
17+
default: 'Build + Tests'
18+
type: choice
19+
options:
20+
- Build + Tests
21+
- Build + Tests + Benchmark
22+
23+
# Cancel previous CI, if running and changed label or pushed PR (Prevent to wait for runners)
24+
concurrency:
25+
group: pr-${{ github.event.pull_request.number }}
26+
cancel-in-progress: true
927

1028
jobs:
1129
build:
@@ -89,7 +107,7 @@ jobs:
89107
90108
run-target:
91109
name: Run apps
92-
if: github.repository_owner == 'espressif' && needs.prepare.outputs.build_only != '1'
110+
if: github.repository_owner == 'espressif' && !contains(github.event.pull_request.labels.*.name, 'Build only')
93111
needs: build
94112
strategy:
95113
fail-fast: false
@@ -151,6 +169,7 @@ jobs:
151169
target: "esp32s3"
152170
env:
153171
TEST_RESULT_NAME: test_results_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}
172+
BENCHMARK_RESULT_NAME: benchmark_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}
154173
TEST_RESULT_FILE: test_results_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}.xml
155174
runs-on: [self-hosted, Linux, bspwall]
156175
container:
@@ -165,22 +184,58 @@ jobs:
165184
- name: Install Python packages
166185
env:
167186
PIP_EXTRA_INDEX_URL: "https://dl.espressif.com/pypi/"
168-
run: pip install --prefer-binary cryptography pytest-embedded pytest-embedded-serial-esp pytest-embedded-idf pytest-custom_exit_code
187+
run: |
188+
echo "PYTEST_BENCHMARK_IGNORE=--ignore='examples/display_lvgl_benchmark'" >> $GITHUB_ENV
189+
pip install --prefer-binary cryptography pytest-embedded pytest-embedded-serial-esp pytest-embedded-idf pytest-custom_exit_code
190+
- name: Download latest results
191+
uses: actions/download-artifact@v4
192+
with:
193+
pattern: benchmark_*
194+
path: benchmark/
195+
- name: Set ignores
196+
if: contains(github.event.pull_request.labels.*.name, 'Run benchmark') || contains(inputs.WFType, 'Build + Tests + Benchmark') || github.ref_name == 'master'
197+
id: set_ignores
198+
run: |
199+
echo "PYTEST_BENCHMARK_IGNORE=" >> $GITHUB_ENV
200+
- name: Pull
201+
run: |
202+
git pull --rebase origin ${{ github.head_ref }} || echo "No remote changes to rebase"
169203
- name: Run apps
170204
run: |
171-
pytest --suppress-no-test-exit-code --ignore-glob '*/managed_components/*' --ignore=.github --junit-xml=${{ env.TEST_RESULT_FILE }} --target=${{ matrix.runner.target }} -m ${{ matrix.runner.marker }} --build-dir=build_${{ matrix.runner.runs-on }}
205+
pytest --suppress-no-test-exit-code --ignore-glob '*/managed_components/*' --ignore=.github ${{ env.PYTEST_BENCHMARK_IGNORE }} --junit-xml=${{ env.TEST_RESULT_FILE }} --target=${{ matrix.runner.target }} -m ${{ matrix.runner.marker }} --build-dir=build_${{ matrix.runner.runs-on }}
172206
- name: Upload test results
173207
uses: actions/upload-artifact@v4
174208
if: always()
175209
with:
176210
name: ${{ env.TEST_RESULT_NAME }}
177-
path: ${{ env.TEST_RESULT_FILE }}
211+
path: |
212+
${{ env.TEST_RESULT_FILE }}
213+
benchmark_*.md
214+
benchmark_*.json
215+
benchmark.json
216+
- name: Upload benchmark results
217+
uses: actions/upload-artifact@v4
218+
if: github.ref_name == 'master'
219+
with:
220+
name: ${{ env.BENCHMARK_RESULT_NAME }}
221+
path: |
222+
benchmark_*.md
223+
benchmark_*.json
224+
- name: Update benchmark release
225+
uses: pyTooling/Actions/releaser@r0
226+
if: github.ref_name == 'master'
227+
with:
228+
token: ${{ secrets.GITHUB_TOKEN }}
229+
files: |
230+
benchmark_*.json
231+
benchmark_*.md
232+
tag: benchmark-latest
178233

179234
publish-results:
180235
name: Publish Test results
181236
needs:
182237
- run-target
183-
if: github.repository_owner == 'espressif' && always() && github.event_name == 'pull_request' && needs.prepare.outputs.build_only == '0'
238+
if: github.repository_owner == 'espressif' && always() && github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'Build only')
184239
runs-on: ubuntu-22.04
185240
steps:
186241
- name: Download Test results
@@ -192,3 +247,29 @@ jobs:
192247
uses: EnricoMi/publish-unit-test-result-action@v2
193248
with:
194249
files: test_results/**/*.xml
250+
- name: Find test result files
251+
id: find_files
252+
run: |
253+
OUTPUT_FILE="combined_benchmarks.md"
254+
echo "" > $OUTPUT_FILE
255+
python <<EOF
256+
import glob
257+
258+
files = sorted(glob.glob("test_results/**/benchmark_*.md"))
259+
print(files)
260+
output_file = "combined_benchmarks.md"
261+
262+
with open(output_file, "w", encoding="utf-8") as outfile:
263+
for file in files:
264+
with open(file, "r", encoding="utf-8") as infile:
265+
outfile.write(infile.read() + "\n\n")
266+
267+
print(f"Merged {len(files)} files into {output_file}")
268+
EOF
269+
270+
echo "output_file=$OUTPUT_FILE" >> "$GITHUB_ENV"
271+
- name: Comment PR
272+
uses: thollander/actions-comment-pull-request@v3
273+
with:
274+
comment-tag: benchmark_results
275+
file-path: ${{ env.output_file }}
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# For more information about build system see
2+
# https://docs.espressif.com/projects/esp-idf/en/latest/api-guides/build-system.html
3+
# The following five lines of boilerplate have to be in your project's
4+
# CMakeLists in this exact order for cmake to work correctly
5+
cmake_minimum_required(VERSION 3.5)
6+
7+
set(COMPONENTS main) # "Trim" the build. Include the minimal set of components; main and anything it depends on.
8+
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
9+
add_compile_options("-Wno-attributes") # For LVGL code
10+
project(display_lvgl_benchmark)
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
# Display LVGL Benchmark
2+
3+
This example shows LVGL internal benchmark demo.
4+
5+
## How to use the example
6+
7+
### Hardware Required
8+
9+
* ESP32-S3-LCD-EV-Board or ESP32-S3-LCD-EV-Board-2
10+
* USB-C Cable
11+
12+
### Compile and flash
13+
14+
```
15+
idf.py -p COMx build flash monitor
16+
```
17+
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
set(LV_DEMO_DIR "")
2+
set(LV_DEMOS_SOURCES "")
3+
if(CONFIG_LV_USE_DEMO_BENCHMARK)
4+
list(APPEND LV_DEMO_DIR ../managed_components/lvgl__lvgl/demos)
5+
file(GLOB_RECURSE LV_DEMOS_SOURCES ${LV_DEMO_DIR}/*.c)
6+
endif()
7+
8+
idf_component_register(
9+
SRCS "main.c" ${LV_DEMOS_SOURCES}
10+
INCLUDE_DIRS "." ${LV_DEMO_DIR})
11+
12+
if(CONFIG_LV_USE_DEMO_BENCHMARK)
13+
set_source_files_properties(
14+
${LV_DEMOS_SOURCES}
15+
PROPERTIES COMPILE_OPTIONS
16+
-DLV_LVGL_H_INCLUDE_SIMPLE)
17+
endif()
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
description: BSP Display LVGL benchmark example
2+
dependencies:
3+
esp32_p4_function_ev_board:
4+
version: '*'
5+
override_path: ../../../bsp/esp32_p4_function_ev_board
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
/*
2+
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
3+
*
4+
* SPDX-License-Identifier: CC0-1.0
5+
*/
6+
7+
#include "freertos/FreeRTOS.h"
8+
#include "freertos/task.h"
9+
#include "esp_log.h"
10+
11+
#include "lv_demos.h"
12+
#include "bsp/esp-bsp.h"
13+
14+
static char *TAG = "app_main";
15+
16+
#define LOG_MEM_INFO (0)
17+
18+
void app_main(void)
19+
{
20+
/* Initialize display and LVGL */
21+
#if defined(BSP_LCD_SUB_BOARD_2_H_RES)
22+
/* Only for esp32_s3_lcd_ev_board */
23+
bsp_display_cfg_t cfg = {
24+
.lvgl_port_cfg = ESP_LVGL_PORT_INIT_CONFIG(),
25+
};
26+
cfg.lvgl_port_cfg.task_stack = 10000;
27+
bsp_display_start_with_config(&cfg);
28+
#else
29+
bsp_display_start();
30+
#endif
31+
32+
/* Set display brightness to 100% */
33+
bsp_display_backlight_on();
34+
35+
ESP_LOGI(TAG, "Display LVGL demo");
36+
bsp_display_lock(0);
37+
lv_demo_benchmark(); /* A demo to measure the performance of LVGL or to compare different settings. */
38+
bsp_display_unlock();
39+
}
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# Name, Type, SubType, Offset, Size, Flags
2+
# Note: if you change the phy_init or app partition offset, make sure to change the offset in Kconfig.projbuild
3+
nvs, data, nvs, 0x9000, 0x6000,
4+
phy_init, data, phy, 0xf000, 0x1000,
5+
factory, app, factory, 0x10000, 0x160000,
Lines changed: 141 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,141 @@
1+
# SPDX-FileCopyrightText: 2023-2025 Espressif Systems (Shanghai) CO LTD
2+
# SPDX-License-Identifier: CC0-1.0
3+
4+
import os
5+
import datetime
6+
import json
7+
from pathlib import Path
8+
import pytest
9+
from pytest_embedded import Dut
10+
import urllib.request
11+
12+
BENCHMARK_RELEASES_URL = "https://github.com/espressif/esp-bsp/releases/download/benchmark-latest"
13+
14+
15+
def write_to_file(board, ext, text):
16+
with open("benchmark_" + board + ext, "a") as file:
17+
file.write(text)
18+
19+
20+
def read_json_file(board):
21+
try:
22+
url = f"{BENCHMARK_RELEASES_URL}/benchmark_{board}.json"
23+
with urllib.request.urlopen(url) as file:
24+
return json.load(file)
25+
except urllib.error.HTTPError:
26+
return []
27+
except json.JSONDecodeError:
28+
return []
29+
30+
31+
def write_json_file(board, data):
32+
repo_root = Path(__file__).resolve().parent
33+
while repo_root.name != "esp-bsp" and repo_root.parent != repo_root:
34+
repo_root = repo_root.parent
35+
file_path = f"{repo_root}/bsp/{board}/benchmark.json"
36+
try:
37+
os.remove(file_path)
38+
except OSError:
39+
pass
40+
try:
41+
with open(file_path, "a") as file:
42+
file.write(data)
43+
except OSError:
44+
pass
45+
46+
47+
def find_test_results(json_obj, test):
48+
if json_obj:
49+
for t in json_obj["tests"]:
50+
if t["Name"] == test:
51+
return t
52+
53+
54+
def get_test_diff(test1, test2, name, positive):
55+
if not test1 or not test2 or not test1[name] or not test2[name]:
56+
return ""
57+
test1[name] = test1[name].replace("%", "")
58+
test2[name] = test2[name].replace("%", "")
59+
diff = int(test1[name]) - int(test2[name])
60+
if diff == 0:
61+
return ""
62+
else:
63+
if positive:
64+
color = "red" if diff < 0 else "green"
65+
else:
66+
color = "green" if diff < 0 else "red"
67+
sign = "+" if diff > 0 else ""
68+
return f"*<span style=\"color:{color}\"><sub>({sign}{diff})</sub></span>*"
69+
70+
71+
@pytest.mark.esp_box_3
72+
@pytest.mark.esp32_p4_function_ev_board
73+
@pytest.mark.esp32_s3_eye
74+
@pytest.mark.esp32_s3_lcd_ev_board
75+
@pytest.mark.esp32_s3_lcd_ev_board_2
76+
@pytest.mark.m5dial
77+
@pytest.mark.m5stack_core_s3
78+
@pytest.mark.m5stack_core_s3_se
79+
def test_example(dut: Dut, request) -> None:
80+
date = datetime.datetime.now()
81+
board = request.node.callspec.id
82+
83+
# Wait for start benchmark
84+
dut.expect_exact('app_main: Display LVGL demo')
85+
dut.expect_exact('main_task: Returned from app_main()')
86+
87+
try:
88+
os.remove("benchmark_" + board + ".md")
89+
os.remove("benchmark_" + board + ".json")
90+
except OSError:
91+
pass
92+
93+
output = {
94+
"date": date.strftime('%d.%m.%Y %H:%M'),
95+
"board": board
96+
}
97+
98+
# Write board into file
99+
write_to_file(board, ".md", f"# Benchmark for BOARD " + board + "\n\n")
100+
write_to_file(board, ".md", f"**DATE:** " + date.strftime('%d.%m.%Y %H:%M') + "\n\n")
101+
# Get LVGL version write it into file
102+
outdata = dut.expect(r'Benchmark Summary \((.*) \)', timeout=200)
103+
output["LVGL"] = outdata[1].decode()
104+
write_to_file(board, ".md", f"**LVGL version:** " + outdata[1].decode() + "\n\n")
105+
outdata = dut.expect(r'Name, Avg. CPU, Avg. FPS, Avg. time, render time, flush time', timeout=200)
106+
write_to_file(board, ".md", f"| Name | Avg. CPU | Avg. FPS | Avg. time | render time | flush time |\n")
107+
write_to_file(board, ".md", f"| ---- | :------: | :------: | :-------: | :---------: | :--------: |\n") # noqa: E203
108+
109+
last_results = read_json_file(board)
110+
111+
# Benchmark lines
112+
output["tests"] = []
113+
for x in range(17):
114+
outdata = dut.expect(r'([\w \.]+),[ ]?(\d+%),[ ]?(\d+),[ ]?(\d+),[ ]?(\d+),[ ]?(\d+)', timeout=200)
115+
test_entry = {
116+
"Name": outdata[1].decode(),
117+
"Avg. CPU": outdata[2].decode(),
118+
"Avg. FPS": outdata[3].decode(),
119+
"Avg. time": outdata[4].decode(),
120+
"Render time": outdata[5].decode(),
121+
"Flush time": outdata[6].decode()
122+
}
123+
output["tests"].append(test_entry)
124+
125+
last_test_result = find_test_results(last_results, test_entry["Name"])
126+
write_to_file(board, ".md", f"| " +
127+
test_entry["Name"] + " | " +
128+
test_entry["Avg. CPU"] + " " + get_test_diff(test_entry, last_test_result, "Avg. CPU", False) + " | " +
129+
test_entry["Avg. FPS"] + " " + get_test_diff(test_entry, last_test_result, "Avg. FPS", True) + " | " +
130+
test_entry["Avg. time"] + " " + get_test_diff(test_entry, last_test_result, "Avg. time", False) + " | " +
131+
test_entry["Render time"] + " " + get_test_diff(test_entry, last_test_result, "Render time", False) + " | " +
132+
test_entry["Flush time"] + " " + get_test_diff(test_entry, last_test_result, "Flush time", False) + " |\n")
133+
134+
write_to_file(board, ".md", "\n")
135+
write_to_file(board, ".md", "***")
136+
write_to_file(board, ".md", "\n\n")
137+
138+
# Save JSON to file
139+
json_output = json.dumps(output, indent=4)
140+
write_to_file(board, ".json", json_output)
141+
write_json_file(board, json_output)

0 commit comments

Comments (0)