forked from erigontech/erigon
-
Notifications
You must be signed in to change notification settings - Fork 0
396 lines (332 loc) · 18.2 KB
/
qa-txpool-performance-test.yml
File metadata and controls
396 lines (332 loc) · 18.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
# QA workflow: runs an Erigon TxPool performance test inside a Kurtosis enclave
# with assertoor, parses the assertoor output log into a JSON results file,
# renders latency/throughput plots, uploads artifacts, records the outcome and
# writes a run summary. Triggered on call/dispatch, release-branch pushes, and
# a weekly schedule.
name: QA - TxPool performance test

on:
  workflow_call:
  workflow_dispatch:
  push:
    branches:
      - "release/3.*"
  schedule:
    # Every Sunday at 00:00 UTC
    - cron: "0 0 * * 0"

jobs:
  tx_pool_assertoor_test:
    runs-on: [self-hosted, qa, X64, long-running]
    env:
      ERIGON_QA_PATH: /home/qarunner/erigon-qa
      # Unique enclave name per run so concurrent runs do not collide
      ENCLAVE_NAME: "kurtosis-run-${{ github.run_id }}"
    steps:
      - name: Fast checkout git repository
        uses: actions/checkout@v6

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.ORG_DOCKERHUB_ERIGONTECH_USERNAME }}
          password: ${{ secrets.ORG_DOCKERHUB_ERIGONTECH_TOKEN }}

      - name: Docker build current branch
        run: |
          docker build -t test/erigon:current .

      - name: Run self hosted Kurtosis + assertoor tests
        id: erigon_kurtosis_test
        uses: erigontech/kurtosis-assertoor-github-action@v1.1.7
        with:
          enclave_name: ${{ env.ENCLAVE_NAME }}
          # NOTE(review): the '.io' extension is unusual for a package-args file — confirm path
          ethereum_package_args: ".github/workflows/kurtosis/txpool-assertoor.io"
          kurtosis_extra_args: --verbosity detailed --cli-log-level trace
          persistent_logs: "true"
          clean_docker: "true"

      - name: Parse Kurtosis output log and create JSON
        if: always()
        id: parse_kurtosis_output
        run: |
          # Find the folder starting with 'assertoor--'
          assertoor_folder=$(find ${{ runner.temp }}/${{ env.ENCLAVE_NAME }}/dump -type d -name 'assertoor--*' | head -n 1)

          # Exit if no assertoor folder is found
          if [ -z "$assertoor_folder" ]; then
            echo "No directory starting with 'assertoor--' found. Skipping."
            exit 0
          fi

          # Parse the output.log file within the assertoor-- folder
          output_log="${assertoor_folder}/output.log"
          echo "Parsing file: $output_log"

          # Create an empty JSON file if there are no results
          output_json="${{ github.workspace }}/outputs_kurtosis.json"
          echo "{}" > $output_json

          # Create a temporary directory
          tmp_dir=$(mktemp -d)

          # Parse the output.log file to find lines with msg="outputs_json"
          while IFS= read -r line; do
            if [[ "$line" == *'msg="outputs_json:'* ]]; then
              # Extract the JSON object and unescape the quotes
              json_str=$(echo "$line" | sed 's/.*msg="outputs_json: \(.*\)".*/\1/' | sed 's/\\"/"/g')
              # Extract the task value from the log line
              task=$(echo "$line" | sed -n 's/.*task=\([^ ]*\).*/\1/p')
              # Skip if task is empty
              if [ -n "$task" ]; then
                # Create or append to task file
                if [ -f "$tmp_dir/$task" ]; then
                  echo "," >> "$tmp_dir/$task"
                fi
                echo "$json_str" >> "$tmp_dir/$task"
              fi
            fi
          done < "$output_log"

          # Write the JSON object to the final file
          echo "{" > $output_json
          first_task=true
          for task_file in "$tmp_dir"/*; do
            [ -f "$task_file" ] || continue
            task=$(basename "$task_file")
            # Add comma for all but first task
            if [ "$first_task" = true ]; then
              first_task=false
            else
              echo "," >> $output_json
            fi
            # Write the task array to the file
            echo "\"$task\": [$(cat "$task_file")]" >> $output_json
          done
          echo "}" >> $output_json
          echo "outputs_kurtosis.json: $output_json"
          echo "success=true" >> $GITHUB_OUTPUT

          # Clean up
          rm -rf "$tmp_dir"
          echo "cleaned up"

      - name: Upload outputs_kurtosis.json as artifact
        if: always() && steps.parse_kurtosis_output.outputs.success == 'true'
        id: upload_outputs_kurtosis_json
        uses: actions/upload-artifact@v6
        with:
          name: outputs_kurtosis.json
          path: outputs_kurtosis.json

      - name: Create HDR plots PNG artifacts
        id: create_hdr_plots
        if: always() && steps.parse_kurtosis_output.outputs.success == 'true'
        run: |
          # Check if the JSON file exists
          output_json="${{ github.workspace }}/outputs_kurtosis.json"
          if [ ! -f "$output_json" ]; then
            echo "JSON file not found, skipping"
            exit 0
          fi

          # Check if tx_pool_latency_analysis array exists in the JSON
          if ! jq -e '.tx_pool_latency_analysis' "$output_json" > /dev/null 2>&1; then
            echo "tx_pool_latency_analysis array not found in JSON, skipping"
            exit 0
          fi

          # Get the array length
          array_length=$(jq '.tx_pool_latency_analysis | length' "$output_json")

          # Loop through each element in the array
          for ((i=0; i<array_length; i++)); do
            # Get tx_count for this element
            tx_count=$(jq -r ".tx_pool_latency_analysis[$i].tx_count" "$output_json")
            # Check if tx_pool_latency_hdr_plot exists for this element
            if jq -e ".tx_pool_latency_analysis[$i].tx_pool_latency_hdr_plot" "$output_json" > /dev/null 2>&1; then
              # Extract the plot data to a CSV file
              csv_file="tx_pool_latency_analysis_${i}_${tx_count}.csv"
              # Extract the plot data, replace literal '\n' with actual newlines and '\t' with actual tabs,
              # and remove all lines starting with '#'
              jq -r ".tx_pool_latency_analysis[$i].tx_pool_latency_hdr_plot" "$output_json" | \
                sed 's/\\n/\n/g; s/\\t/\t/g' | grep -v "^#" > "$csv_file"
              # Call the Python script to process the CSV file
              python3 $ERIGON_QA_PATH/test_system/qa-tests/tx-pool/hdr_plot.py \
                --input_file "$csv_file" \
                --output_file "tx_pool_latency_analysis_${i}_${tx_count}.png"
              # Delete the CSV file after processing
              rm -f "$csv_file"
              echo "Processed tx_pool_latency_analysis[$i] with tx_count=$tx_count"
            else
              echo "tx_pool_latency_hdr_plot not found for element $i, skipping"
            fi
          done

          # Set an output to indicate we processed at least one element
          echo "processed=true" >> $GITHUB_OUTPUT

      - name: Create throughput plots PNG artifacts
        id: create_throughput_plots
        if: always() && steps.parse_kurtosis_output.outputs.success == 'true'
        run: |
          # Check if the JSON file exists
          output_json="${{ github.workspace }}/outputs_kurtosis.json"
          if [ ! -f "$output_json" ]; then
            echo "JSON file not found, skipping"
            exit 0
          fi

          # Check if tx_pool_throughput_analysis array exists in the JSON
          if ! jq -e '.tx_pool_throughput_analysis' "$output_json" > /dev/null 2>&1; then
            echo "tx_pool_throughput_analysis array not found in JSON, skipping"
            exit 0
          fi

          # Get the array length
          array_length=$(jq '.tx_pool_throughput_analysis | length' "$output_json")

          # Loop through each element in the array
          for ((i=0; i<array_length; i++)); do
            # Check if throughput_measures exists for this element
            if jq -e ".tx_pool_throughput_analysis[$i].throughput_measures" "$output_json" > /dev/null 2>&1; then
              # Extract the throughput measures data to a CSV file
              csv_file="tx_pool_throughput_analysis_${i}.csv"
              # Extract the throughput_measures array and convert to CSV format
              # Each measure contains load_tps, processed_tps, not_received_p2p_event_rate and coordinated_omission_event_rate
              echo "load_tps,processed_tps,not_received_p2p_event_rate,coordinated_omission_event_rate" > "$csv_file"
              jq -r ".tx_pool_throughput_analysis[$i].throughput_measures[] | [.load_tps, .processed_tps, .not_received_p2p_event_rate, .coordinated_omission_event_rate] | @csv" "$output_json" >> "$csv_file"
              # Call the Python script to process the CSV file
              python3 $ERIGON_QA_PATH/test_system/qa-tests/tx-pool/throughput_plot.py \
                --input_file "$csv_file" \
                --output_file "tx_pool_throughput_analysis_${i}.png"
              # Delete the CSV file after processing
              rm -f "$csv_file"
              echo "Processed tx_pool_throughput_analysis[$i]"
            else
              echo "throughput_measures not found for element $i, skipping"
            fi
          done

          # Set an output to indicate we processed at least one element
          echo "processed=true" >> $GITHUB_OUTPUT

      - name: Upload latency PNG images as artifacts
        if: always() && steps.create_hdr_plots.outputs.processed == 'true'
        id: upload_latency_plots
        uses: actions/upload-artifact@v6
        with:
          name: tx-pool-latency-plots
          path: tx_pool_latency_analysis_*.png
          if-no-files-found: ignore

      - name: Upload throughput PNG images as artifacts
        if: always() && steps.create_throughput_plots.outputs.processed == 'true'
        id: upload_throughput_plots
        uses: actions/upload-artifact@v6
        with:
          name: tx-pool-throughput-plots
          path: tx_pool_throughput_analysis_*.png
          if-no-files-found: ignore

      - name: Clean up PNG files
        if: always() && (steps.create_hdr_plots.outputs.processed == 'true' || steps.create_throughput_plots.outputs.processed == 'true')
        run: |
          # Remove all generated PNG files
          rm -f tx_pool_latency_analysis_*.png
          rm -f tx_pool_throughput_analysis_*.png

      - name: Save test results
        if: always()
        env:
          TEST_RESULT: ${{ steps.erigon_kurtosis_test.outputs.test_result || 'failure' }}
        run: |
          python3 $ERIGON_QA_PATH/test_system/qa-tests/uploads/upload_test_results.py \
            --repo erigon \
            --commit $(git rev-parse HEAD) \
            --branch ${{ github.ref_name }} \
            --test_name txpool-performance \
            --runner ${{ runner.name }} \
            --outcome $TEST_RESULT \
            --result_file ${{ github.workspace }}/outputs_kurtosis.json

      - name: Report results in run summary
        if: always()
        run: |
          output_json="${{ github.workspace }}/outputs_kurtosis.json"

          # Start building the summary
          echo "# TxPool Performance Test Results" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Check if JSON file exists
          if [ ! -f "$output_json" ]; then
            echo "❌ **No test results found** - JSON output file is missing" >> $GITHUB_STEP_SUMMARY
            exit 0
          fi

          # Extract test result status
          test_result="${{ steps.erigon_kurtosis_test.outputs.test_result || 'failure' }}"
          if [ "$test_result" = "success" ]; then
            echo "✅ **Overall Test Status:** PASSED" >> $GITHUB_STEP_SUMMARY
          else
            echo "❌ **Overall Test Status:** FAILED" >> $GITHUB_STEP_SUMMARY
          fi
          echo "" >> $GITHUB_STEP_SUMMARY

          # Report throughput analysis
          echo "## 📊 Throughput Analysis" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if jq -e '.tx_pool_throughput_analysis' "$output_json" > /dev/null 2>&1; then
            echo "| Max TPS | Missed Events % | Coordinated Omission Events % | Starting TPS | Ending TPS | Increment TPS | Duration (s) |" >> $GITHUB_STEP_SUMMARY
            echo "|---------|-----------------|-------------------------------|--------------|------------|---------------|--------------|" >> $GITHUB_STEP_SUMMARY
            throughput_length=$(jq '.tx_pool_throughput_analysis | length' "$output_json")
            for ((i=0; i<throughput_length; i++)); do
              max_tps=$(jq -r ".tx_pool_throughput_analysis[$i].max_tps" "$output_json")
              missed_p2p_event_rate=$(jq -r ".tx_pool_throughput_analysis[$i].missed_p2p_event_rate" "$output_json")
              coordinated_omission_event_rate=$(jq -r ".tx_pool_throughput_analysis[$i].coordinated_omission_event_rate" "$output_json")
              starting_tps=$(jq -r ".tx_pool_throughput_analysis[$i].starting_tps" "$output_json")
              ending_tps=$(jq -r ".tx_pool_throughput_analysis[$i].ending_tps" "$output_json")
              increment_tps=$(jq -r ".tx_pool_throughput_analysis[$i].increment_tps" "$output_json")
              duration_s=$(jq -r ".tx_pool_throughput_analysis[$i].duration_s" "$output_json")
              # Format percentages from decimal to percentage format
              if [ "$missed_p2p_event_rate" != "null" ] && [ -n "$missed_p2p_event_rate" ] && [[ "$missed_p2p_event_rate" =~ ^[0-9]+\.?[0-9]*$ ]]; then
                missed_p2p_event_count_percentage=$(printf "%.2f%%" $(echo "scale=6; $missed_p2p_event_rate * 100" | bc -l))
              else
                missed_p2p_event_count_percentage="N/A"
              fi
              if [ "$coordinated_omission_event_rate" != "null" ] && [ -n "$coordinated_omission_event_rate" ] && [[ "$coordinated_omission_event_rate" =~ ^[0-9]+\.?[0-9]*$ ]]; then
                coordinated_omission_event_count_percentage=$(printf "%.2f%%" $(echo "scale=6; $coordinated_omission_event_rate * 100" | bc -l))
              else
                coordinated_omission_event_count_percentage="N/A"
              fi
              echo "| $max_tps | $missed_p2p_event_count_percentage | $coordinated_omission_event_count_percentage | $starting_tps | $ending_tps | $increment_tps | $duration_s |" >> $GITHUB_STEP_SUMMARY
            done
          else
            echo "⚠️ No throughput analysis data available" >> $GITHUB_STEP_SUMMARY
          fi
          echo "" >> $GITHUB_STEP_SUMMARY

          # Report latency analysis
          echo "## 🕐 Latency Analysis" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if jq -e '.tx_pool_latency_analysis' "$output_json" > /dev/null 2>&1; then
            echo "| TX Count | Min (μs) | Max (ms) | Coordinated Omission Events % | Missed Events % | Duplicated Events % |" >> $GITHUB_STEP_SUMMARY
            echo "|----------|----------|----------|-------------------------------|-----------------|---------------------|" >> $GITHUB_STEP_SUMMARY
            latency_length=$(jq '.tx_pool_latency_analysis | length' "$output_json")
            for ((i=0; i<latency_length; i++)); do
              tx_count=$(jq -r ".tx_pool_latency_analysis[$i].tx_count" "$output_json")
              min_us=$(jq -r ".tx_pool_latency_analysis[$i].min_latency_mus" "$output_json")
              max_us=$(jq -r ".tx_pool_latency_analysis[$i].max_latency_mus" "$output_json")
              missed_p2p_event_rate=$(jq -r ".tx_pool_latency_analysis[$i].missed_p2p_event_rate" "$output_json")
              coor_omission_event_rate=$(jq -r ".tx_pool_latency_analysis[$i].coordinated_omission_event_rate" "$output_json")
              duplicated_p2p_event_rate=$(jq -r ".tx_pool_latency_analysis[$i].duplicated_p2p_event_rate" "$output_json")
              # Convert max from microseconds to milliseconds, handle null/invalid values
              if [ "$max_us" != "null" ] && [ -n "$max_us" ] && [[ "$max_us" =~ ^[0-9]+\.?[0-9]*$ ]]; then
                max_ms=$(echo "scale=2; $max_us / 1000" | bc -l)
              else
                max_ms="N/A"
              fi
              # Format percentages from decimal to percentage format
              if [ "$missed_p2p_event_rate" != "null" ] && [ -n "$missed_p2p_event_rate" ] && [[ "$missed_p2p_event_rate" =~ ^[0-9]+\.?[0-9]*$ ]]; then
                missed_p2p_event_count_percentage=$(printf "%.2f%%" $(echo "scale=6; $missed_p2p_event_rate * 100" | bc -l))
              else
                missed_p2p_event_count_percentage="N/A"
              fi
              if [ "$coor_omission_event_rate" != "null" ] && [ -n "$coor_omission_event_rate" ] && [[ "$coor_omission_event_rate" =~ ^[0-9]+\.?[0-9]*$ ]]; then
                coor_omission_event_count_percentage=$(printf "%.2f%%" $(echo "scale=6; $coor_omission_event_rate * 100" | bc -l))
              else
                coor_omission_event_count_percentage="N/A"
              fi
              if [ "$duplicated_p2p_event_rate" != "null" ] && [ -n "$duplicated_p2p_event_rate" ] && [[ "$duplicated_p2p_event_rate" =~ ^[0-9]+\.?[0-9]*$ ]]; then
                duplicated_p2p_event_count_percentage=$(printf "%.2f%%" $(echo "scale=6; $duplicated_p2p_event_rate * 100" | bc -l))
              else
                duplicated_p2p_event_count_percentage="N/A"
              fi
              echo "| $tx_count | $min_us | $max_ms | $coor_omission_event_count_percentage | $missed_p2p_event_count_percentage | $duplicated_p2p_event_count_percentage |" >> $GITHUB_STEP_SUMMARY
            done
          else
            echo "⚠️ No latency analysis data available" >> $GITHUB_STEP_SUMMARY
          fi
          echo "" >> $GITHUB_STEP_SUMMARY

          # Add artifact links
          echo "## 📈 Artifacts" >> $GITHUB_STEP_SUMMARY

          # Add link to raw JSON results if available
          if [ -f "$output_json" ]; then
            echo "- [Raw JSON Results](${{ steps.upload_outputs_kurtosis_json.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
          fi

          # Add link to latency plots if they were generated
          if [ "${{ steps.upload_latency_plots.outputs.artifact-url }}" != "" ]; then
            echo "- [Latency Distribution Plots](${{ steps.upload_latency_plots.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
          fi

          # Add link to throughput plots if they were generated
          if [ "${{ steps.upload_throughput_plots.outputs.artifact-url }}" != "" ]; then
            echo "- [Throughput Analysis Plots](${{ steps.upload_throughput_plots.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
          fi

      - name: Clean test results
        uses: gacts/run-and-post-run@v1.4.3
        if: always()
        with:
          post: |
            rm -rf kurtosis_artifacts
            rm -f outputs_kurtosis.json