@@ -5,28 +5,56 @@ name: Build ESP-BSP apps
55
66on :
77 pull_request :
8- types : [opened, reopened, synchronize]
8+ types : [opened, reopened, synchronize, labeled]
9+ push :
10+ branches :
11+ - master
12+ workflow_dispatch :
13+ inputs :
14+ WFType :
15+ description : ' Workflow type'
16+ required : true
17+ default : ' Build + Tests'
18+ type : choice
19+ options :
20+ - Build + Tests
21+ - Build + Tests + Benchmark
922
1023jobs :
1124 build :
1225 strategy :
1326 matrix :
1427 include :
1528 - idf_ver : " latest"
16- parallel_count : 5
29+ parallel_count : 10
1730 parallel_index : 1
1831 - idf_ver : " latest"
19- parallel_count : 5
32+ parallel_count : 10
2033 parallel_index : 2
2134 - idf_ver : " latest"
22- parallel_count : 5
35+ parallel_count : 10
2336 parallel_index : 3
2437 - idf_ver : " latest"
25- parallel_count : 5
38+ parallel_count : 10
2639 parallel_index : 4
2740 - idf_ver : " latest"
28- parallel_count : 5
41+ parallel_count : 10
2942 parallel_index : 5
43+ - idf_ver : " latest"
44+ parallel_count : 10
45+ parallel_index : 6
46+ - idf_ver : " latest"
47+ parallel_count : 10
48+ parallel_index : 7
49+ - idf_ver : " latest"
50+ parallel_count : 10
51+ parallel_index : 8
52+ - idf_ver : " latest"
53+ parallel_count : 10
54+ parallel_index : 9
55+ - idf_ver : " latest"
56+ parallel_count : 10
57+ parallel_index : 10
3058 - idf_ver : " release-v5.1"
3159 parallel_count : 2
3260 parallel_index : 1
89117
90118 run-target :
91119 name : Run apps
92- if : github.repository_owner == 'espressif' && needs.prepare.outputs.build_only != '1'
120+ if : github.repository_owner == 'espressif' && !contains(github.event.pull_request.labels.*.name, 'Build only')
93121 needs : build
94122 strategy :
95123 fail-fast : false
@@ -151,6 +179,7 @@ jobs:
151179 target : " esp32s3"
152180 env :
153181 TEST_RESULT_NAME : test_results_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}
182+ BENCHMARK_RESULT_NAME : benchmark_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}
154183 TEST_RESULT_FILE : test_results_${{ matrix.runner.target }}_${{ matrix.runner.marker }}_${{ matrix.idf_ver }}.xml
155184 runs-on : [self-hosted, Linux, bspwall]
156185 container :
@@ -165,22 +194,58 @@ jobs:
165194 - name : Install Python packages
166195 env :
167196 PIP_EXTRA_INDEX_URL : " https://dl.espressif.com/pypi/"
168- run : pip install --prefer-binary cryptography pytest-embedded pytest-embedded-serial-esp pytest-embedded-idf pytest-custom_exit_code
197+ run : |
198+ echo "PYTEST_BENCHMARK_IGNORE=--ignore='examples/display_lvgl_benchmark'" >> $GITHUB_ENV
199+ pip install --prefer-binary cryptography pytest-embedded pytest-embedded-serial-esp pytest-embedded-idf pytest-custom_exit_code
200+ - name : Download latest results
201+ uses : actions/download-artifact@v4
202+ with :
203+ pattern : benchmark_*
204+ path : benchmark/
205+ - name : Set ignores
206+ if : contains(github.event.pull_request.labels.*.name, 'Run benchmark') || contains(inputs.WFType, 'Build + Tests + Benchmark') || github.ref_name == 'master'
207+ id : set_ignores
208+ run : |
209+ echo "PYTEST_BENCHMARK_IGNORE=" >> $GITHUB_ENV
210+ - name : Pull
211+ run : |
212+ git pull --rebase origin ${{ github.head_ref }} || echo "No remote changes to rebase"
169213 - name : Run apps
170214 run : |
171- pytest --suppress-no-test-exit-code --ignore-glob '*/managed_components/*' --ignore=.github --junit-xml=${{ env.TEST_RESULT_FILE }} --target=${{ matrix.runner.target }} -m ${{ matrix.runner.marker }} --build-dir=build_${{ matrix.runner.runs-on }}
215+ pytest --suppress-no-test-exit-code --ignore-glob '*/managed_components/*' --ignore=.github ${{ env.PYTEST_BENCHMARK_IGNORE }} --junit-xml=${{ env.TEST_RESULT_FILE }} --target=${{ matrix.runner.target }} -m ${{ matrix.runner.marker }} --build-dir=build_${{ matrix.runner.runs-on }}
172216 - name : Upload test results
173217 uses : actions/upload-artifact@v4
174218 if : always()
175219 with :
176220 name : ${{ env.TEST_RESULT_NAME }}
177- path : ${{ env.TEST_RESULT_FILE }}
221+ path : |
222+ ${{ env.TEST_RESULT_FILE }}
223+ benchmark_*.md
224+ benchmark_*.json
225+ benchmark.json
226+ - name : Upload test results
227+ uses : actions/upload-artifact@v4
228+ if : github.ref_name == 'master'
229+ with :
230+ name : ${{ env.BENCHMARK_RESULT_NAME }}
231+ path : |
232+ benchmark_*.md
233+ benchmark_*.json
234+ - name : Update benchmark release
235+ uses : pyTooling/Actions/releaser@r0
236+ if : github.ref_name == 'master'
237+ with :
238+ token : ${{ secrets.GITHUB_TOKEN }}
239+ files : |
240+ benchmark_*.json
241+ benchmark_*.md
242+ tag : benchmark-latest
178243
179244 publish-results :
180245 name : Publish Test results
181246 needs :
182247 - run-target
183- if : github.repository_owner == 'espressif' && always() && github.event_name == 'pull_request' && needs.prepare.outputs.build_only == '0'
248+ if : github.repository_owner == 'espressif' && always() && github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'Build only')
184249 runs-on : ubuntu-22.04
185250 steps :
186251 - name : Download Test results
@@ -192,3 +257,29 @@ jobs:
192257 uses : EnricoMi/publish-unit-test-result-action@v2
193258 with :
194259 files : test_results/**/*.xml
260+ - name : Find test result files
261+ id : find_files
262+ run : |
263+ OUTPUT_FILE="combined_benchmarks.md"
264+ echo "" > $OUTPUT_FILE
265+ python <<EOF
266+ import glob
267+
268+ files = sorted(glob.glob("test_results/**/benchmark_*.md"))
269+ print(files)
270+ output_file = "combined_benchmarks.md"
271+
272+ with open(output_file, "w", encoding="utf-8") as outfile:
273+ for file in files:
274+ with open(file, "r", encoding="utf-8") as infile:
275+ outfile.write(infile.read() + "\n\n")
276+
277+ print(f"Merged {len(files)} files into {output_file}")
278+ EOF
279+
280+ echo "output_file=$OUTPUT_FILE" >> "$GITHUB_ENV"
281+ - name : Comment PR
282+ uses : thollander/actions-comment-pull-request@v3
283+ with :
284+ comment-tag : benchmark_results
285+ file-path : ${{ env.output_file }}