     "GITHUB_REPOSITORY", "faster-cpython/bench_runner"
 )
 # Environment variables that control the execution of CPython
-ENV_VARS = ["PYTHON_JIT"]
+ENV_VARS = ["PYTHON_JIT", "PYPERF_PERF_RECORD_EXTRA_OPTS"]


 class NoBenchmarkError(Exception):
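# Illustration only, not part of this commit: PYPERF_PERF_RECORD_EXTRA_OPTS is
# presumably read by pyperf's perf_record hook (enabled later in this diff via
# `--hook perf_record`) to pass extra options to `perf record`, for example to
# capture DWARF call graphs:
import os

os.environ["PYPERF_PERF_RECORD_EXTRA_OPTS"] = "--call-graph dwarf"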
@@ -64,7 +64,7 @@ def get_benchmark_names(benchmarks: str) -> list[str]:
 def run_benchmarks(
     python: PathLike,
     benchmarks: str,
-    command_prefix: Iterable[str] | None = None,
+    /,
     test_mode: bool = False,
     extra_args: Iterable[str] | None = None,
 ) -> None:
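# Illustration only, not part of this commit: the bare `/` that replaces
# `command_prefix` makes `python` and `benchmarks` positional-only, and the
# remaining options read best as keywords (as in the updated call in `_main`
# at the bottom of this diff):
run_benchmarks(python, benchmarks, test_mode=True)            # OK
# run_benchmarks(python=python, benchmarks=benchmarks)        # TypeError: positional-only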
@@ -74,9 +74,6 @@ def run_benchmarks(
     if BENCHMARK_JSON.is_file():
         BENCHMARK_JSON.unlink()

-    if command_prefix is None:
-        command_prefix = []
-
     if test_mode:
         fast_arg = ["--fast"]
     else:
@@ -86,7 +83,6 @@ def run_benchmarks(
         extra_args = []

     args = [
-        *command_prefix,
         sys.executable,
         "-m",
         "pyperformance",
@@ -173,19 +169,36 @@ def collect_pystats(
     run_summarize_stats(python, fork, ref, "all", benchmark_links, flags=flags)


-def perf_to_csv(lines: Iterable[str], output: PathLike):
-    event_count_prefix = "# Event count (approx.): "
-    total = None
+def get_perf_lines(files: Iterable[PathLike]) -> Iterable[str]:
+    for filename in files:
+        p = subprocess.Popen(
+            [
+                "perf",
+                "report",
+                "--stdio",
+                "-g",
+                "none",
+                "--show-total-period",
+                "-s",
+                "pid,symbol,dso",
+                "-i",
+                str(filename),
+            ],
+            encoding="utf-8",
+            stdout=subprocess.PIPE,
+            bufsize=1,
+        )
+        assert p.stdout is not None  # for pyright
+        yield from iter(p.stdout.readline, "")
+        p.kill()
+

+def perf_to_csv(lines: Iterable[str], output: PathLike):
     rows = []
     for line in lines:
         line = line.strip()
-        if line.startswith(event_count_prefix):
-            total = int(line[len(event_count_prefix) :].strip())
-        elif line.startswith("#") or line == "":
+        if line.startswith("#") or line == "":
             pass
-        elif total is None:
-            raise ValueError("Could not find total sample count")
         else:
             _, period, command, _, symbol, shared, _ = line.split(maxsplit=6)
             pid, command = command.split(":")
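# Illustration only, not part of this commit: get_perf_lines() above streams the
# `perf report` output using the two-argument iter() sentinel idiom.
# iter(callable, sentinel) keeps calling `callable` until it returns the sentinel,
# so iter(p.stdout.readline, "") yields one line at a time until EOF instead of
# buffering the whole report in memory. The same idiom on an ordinary file:
with open("example.txt", encoding="utf-8") as f:
    for line in iter(f.readline, ""):
        print(line.rstrip())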
@@ -209,47 +222,28 @@ def collect_perf(python: PathLike, benchmarks: str):
     shutil.rmtree(PROFILING_RESULTS)
     PROFILING_RESULTS.mkdir()

-    perf_data = Path("perf.data")
+    perf_data_glob = "perf.data.*"
     for benchmark in all_benchmarks:
-        if perf_data.exists():
-            perf_data.unlink()
-
-        try:
-            run_benchmarks(
-                python,
-                benchmark,
-                command_prefix=[
-                    "perf",
-                    "record",
-                    "-o",
-                    "perf.data",
-                    "--",
-                ],
+        for filename in Path(".").glob(perf_data_glob):
+            filename.unlink()
+
+        run_benchmarks(
+            python,
+            benchmark,
+            extra_args=["--hook", "perf_record"],
+        )
+
+        fileiter = Path(".").glob(perf_data_glob)
+        if util.has_any_element(fileiter):
+            perf_to_csv(
+                get_perf_lines(fileiter),
+                PROFILING_RESULTS / f"{benchmark}.perf.csv",
             )
-        except NoBenchmarkError:
-            pass
         else:
-            if perf_data.exists():
-                output = subprocess.check_output(
-                    [
-                        "perf",
-                        "report",
-                        "--stdio",
-                        "-g",
-                        "none",
-                        "--show-total-period",
-                        "-s",
-                        "pid,symbol,dso",
-                        "-i",
-                        "perf.data",
-                    ],
-                    encoding="utf-8",
-                )
-                perf_to_csv(
-                    output.splitlines(), PROFILING_RESULTS / f"{benchmark}.perf.csv"
-                )
-            else:
-                print(f"No perf.data file generated for {benchmark}", file=sys.stderr)
+            print(f"No perf.data files generated for {benchmark}", file=sys.stderr)
+
+        for filename in Path(".").glob(perf_data_glob):
+            filename.unlink()


 def update_metadata(
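# Sketch under assumptions, not part of this commit: util.has_any_element() is not
# shown in this diff. A minimal helper with that name might look like the following;
# note that a one-shot iterator would lose an item to such a check, so the real
# helper may well be implemented differently.
def has_any_element(iterable) -> bool:
    for _ in iterable:
        return True
    return False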
@@ -381,7 +375,7 @@ def _main(
     benchmarks = select_benchmarks(benchmarks)

     if mode == "benchmark":
-        run_benchmarks(python, benchmarks, [], test_mode)
+        run_benchmarks(python, benchmarks, test_mode=test_mode)
         update_metadata(BENCHMARK_JSON, fork, ref, run_id=run_id)
         copy_to_directory(BENCHMARK_JSON, python, fork, ref, flags)
     elif mode == "perf":