diff --git a/tools/run_tests/run_microbenchmark.py b/tools/run_tests/run_microbenchmark.py
index a9a563c1bec174b54e91b88fad7e655341b39e18..ea87b6cd9861397d8fd3cf7a34ae4a0727cf50ac 100755
--- a/tools/run_tests/run_microbenchmark.py
+++ b/tools/run_tests/run_microbenchmark.py
@@ -128,18 +128,19 @@ def collect_perf(bm_name, args):
        'CONFIG=mutrace', '-j', '%d' % multiprocessing.cpu_count()])
   for line in subprocess.check_output(['bins/mutrace/%s' % bm_name,
                                        '--benchmark_list_tests']).splitlines():
-    subprocess.check_call(['sudo', 'perf', 'record', '-o', 'perf.data',
+    subprocess.check_call(['perf', 'record', '-o', '%s-perf.data' % fnize(line),
                            '-g', '-c', '1000',
                            'bins/mutrace/%s' % bm_name,
                            '--benchmark_filter=^%s$' % line,
                            '--benchmark_min_time=20'])
-    subprocess.check_call(['sudo', 'perf', 'script', '-i', 'perf.data', '>', 'bm.perf'], shell=True)
-    subprocess.check_call([
-        '%s/stackcollapse-perf.pl' % flamegraph_dir, 'bm.perf', '>', 'bm.folded'], shell=True)
-    link(line, '%s.svg' % fnize(line))
-    with open('reports/%s.svg' % fnize(line), 'w') as f:
-      f.write(subprocess.check_output([
-          '%s/flamegraph.pl' % flamegraph_dir, 'bm.folded']))
+    env = os.environ.copy()
+    env.update({
+      'PERF_BASE_NAME': fnize(line),
+      'OUTPUT_DIR': 'reports',
+      'OUTPUT_FILENAME': fnize(line),
+    })
+    subprocess.check_call(['tools/run_tests/performance/process_local_perf_flamegraphs.sh'],
+                          env=env)
 
 def collect_summary(bm_name, args):
   heading('Summary: %s' % bm_name)
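
For reference, the inline pipeline removed above passed '>' redirections inside an argument list together with shell=True, which never actually performs the redirect; the new process_local_perf_flamegraphs.sh helper presumably runs the same perf script -> stackcollapse-perf.pl -> flamegraph.pl chain, driven by the PERF_BASE_NAME, OUTPUT_DIR and OUTPUT_FILENAME environment variables it is given. A minimal Python sketch of that chain, assuming the FlameGraph checkout location and the intermediate file names (the function name and the flamegraph_dir default are illustrative assumptions, not part of this change):

import os
import subprocess

def make_flamegraph(perf_base_name, output_dir, output_filename,
                    flamegraph_dir='../flamegraph'):
  # Assumed naming: <base>-perf.data, as written by 'perf record' above.
  perf_data = '%s-perf.data' % perf_base_name
  stacks = '%s.perf' % perf_base_name
  folded = '%s.folded' % perf_base_name
  # 'perf script' turns the recorded samples into readable stack traces.
  with open(stacks, 'w') as f:
    subprocess.check_call(['perf', 'script', '-i', perf_data], stdout=f)
  # stackcollapse-perf.pl folds the traces into one line per unique stack.
  with open(folded, 'w') as f:
    subprocess.check_call(
        ['%s/stackcollapse-perf.pl' % flamegraph_dir, stacks], stdout=f)
  # flamegraph.pl renders the folded stacks as an SVG report.
  with open(os.path.join(output_dir, '%s.svg' % output_filename), 'w') as f:
    subprocess.check_call(
        ['%s/flamegraph.pl' % flamegraph_dir, folded], stdout=f)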