diff --git a/tools/jenkins/run_performance_profile_daily.sh b/tools/jenkins/run_performance_profile_daily.sh
index 39c3f2c996a0477dd7e874f515e7a511e008a8f6..f239fad18866fa284ddd2ee1c664eeb381c92e42 100755
--- a/tools/jenkins/run_performance_profile_daily.sh
+++ b/tools/jenkins/run_performance_profile_daily.sh
@@ -32,8 +32,6 @@ set -ex
 
 cd $(dirname $0)/../..
 
-CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
-
 # try to use pypy for generating reports
 # each trace dumps 7-8gig of text to disk, and processing this into a report is
 # heavyweight - so any speed boost is worthwhile
@@ -45,4 +43,3 @@ else
 fi
 
 $PYTHON tools/run_tests/run_microbenchmark.py --collect summary perf latency
-
diff --git a/tools/jenkins/run_performance_profile_hourly.sh b/tools/jenkins/run_performance_profile_hourly.sh
index 1d5930eb416076effdb4694716ae49014d6cd65a..dfcc2bb1163c15fc4f3a0679cf5573b1741d2653 100755
--- a/tools/jenkins/run_performance_profile_hourly.sh
+++ b/tools/jenkins/run_performance_profile_hourly.sh
@@ -32,9 +32,10 @@ set -ex
 
 cd $(dirname $0)/../..
 
-make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server
+CPUS=$(python -c 'import multiprocessing; print(multiprocessing.cpu_count())')
+
+make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server -j $CPUS
 bins/opt/memory_profile_test
 bq load microbenchmarks.memory memory_usage.csv
 
 tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload
-