Skip to content
Snippets Groups Projects
Commit c9dbd5b9 authored by Craig Tiller's avatar Craig Tiller Committed by GitHub
Browse files

Merge pull request #9732 from ctiller/bm_perf

Serialize running of perf, switch back to -f for consistent event counts
parents 48dbdfd9 c2c0c6f5
No related branches found
No related tags found
No related merge requests found
...@@ -134,7 +134,7 @@ def collect_perf(bm_name, args): ...@@ -134,7 +134,7 @@ def collect_perf(bm_name, args):
link(line, '%s.svg' % fnize(line)) link(line, '%s.svg' % fnize(line))
benchmarks.append( benchmarks.append(
jobset.JobSpec(['perf', 'record', '-o', '%s-perf.data' % fnize(line), jobset.JobSpec(['perf', 'record', '-o', '%s-perf.data' % fnize(line),
'-g', '-c', '1000', '-g', '-F', '997',
'bins/mutrace/%s' % bm_name, 'bins/mutrace/%s' % bm_name,
'--benchmark_filter=^%s$' % line, '--benchmark_filter=^%s$' % line,
'--benchmark_min_time=10'])) '--benchmark_min_time=10']))
...@@ -152,7 +152,7 @@ def collect_perf(bm_name, args): ...@@ -152,7 +152,7 @@ def collect_perf(bm_name, args):
if len(benchmarks) >= 20: if len(benchmarks) >= 20:
# run up to half the cpu count: each benchmark can use up to two cores # run up to half the cpu count: each benchmark can use up to two cores
# (one for the microbenchmark, one for the data flush) # (one for the microbenchmark, one for the data flush)
jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2), jobset.run(benchmarks, maxjobs=1,
add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}) add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port})
jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count()) jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
jobset.run(cleanup, maxjobs=multiprocessing.cpu_count()) jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
...@@ -161,7 +161,7 @@ def collect_perf(bm_name, args): ...@@ -161,7 +161,7 @@ def collect_perf(bm_name, args):
cleanup = [] cleanup = []
# run the remaining benchmarks that weren't flushed # run the remaining benchmarks that weren't flushed
if len(benchmarks): if len(benchmarks):
jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2), jobset.run(benchmarks, maxjobs=1,
add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}) add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port})
jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count()) jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count())
jobset.run(cleanup, maxjobs=multiprocessing.cpu_count()) jobset.run(cleanup, maxjobs=multiprocessing.cpu_count())
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment