diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py
index 0ea23d22122e7f38810d13cd5f861d99229f89f5..ddcf053ae5ec9a92aead775bc55a4c3332665faa 100755
--- a/tools/run_tests/performance/bq_upload_result.py
+++ b/tools/run_tests/performance/bq_upload_result.py
@@ -115,6 +115,9 @@ def _flatten_result_inplace(scenario_result):
   scenario_result['scenario']['clientConfig'] = json.dumps(scenario_result['scenario']['clientConfig'])
   scenario_result['scenario']['serverConfig'] = json.dumps(scenario_result['scenario']['serverConfig'])
   scenario_result['latencies'] = json.dumps(scenario_result['latencies'])
+  for stats in scenario_result['serverStats']:
+    stats.pop('totalCpuTime', None)
+    stats.pop('idleCpuTime', None)
   for stats in scenario_result['clientStats']:
     stats['latencies'] = json.dumps(stats['latencies'])
     stats.pop('requestResults', None)
@@ -122,6 +125,7 @@ def _flatten_result_inplace(scenario_result):
   scenario_result['clientSuccess'] = json.dumps(scenario_result['clientSuccess'])
   scenario_result['serverSuccess'] = json.dumps(scenario_result['serverSuccess'])
   scenario_result['requestResults'] = json.dumps(scenario_result.get('requestResults', []))
+  scenario_result['summary'].pop('serverCpuUsage', None)
   scenario_result['summary'].pop('successfulRequestsPerSecond', None)
   scenario_result['summary'].pop('failedRequestsPerSecond', None)
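
A minimal sketch of what the added pops do, using a hypothetical `scenario_result` fragment. Only the popped field names (`totalCpuTime`, `idleCpuTime`, `serverCpuUsage`) come from the patch; the surrounding dict shape and values are assumed for illustration, and the rationale (keeping fields out of the BigQuery row) is inferred from the neighboring pops in `_flatten_result_inplace`.

```python
# Hypothetical fragment of a scenario_result dict; only the popped field
# names are taken from the patch, the other keys and values are made up.
scenario_result = {
    'serverStats': [
        {'timeElapsed': 30.0, 'totalCpuTime': 12345, 'idleCpuTime': 6789},
    ],
    'summary': {
        'qps': 100000.0,
        'serverCpuUsage': 45.0,
        'successfulRequestsPerSecond': 99000.0,
        'failedRequestsPerSecond': 1.0,
    },
}

# Mirrors the added lines: drop the raw CPU fields from each server stats
# entry and the derived CPU usage from the summary before upload.
for stats in scenario_result['serverStats']:
    stats.pop('totalCpuTime', None)  # pop(..., None) tolerates absent keys
    stats.pop('idleCpuTime', None)
scenario_result['summary'].pop('serverCpuUsage', None)

print(scenario_result['serverStats'][0])                # {'timeElapsed': 30.0}
print('serverCpuUsage' in scenario_result['summary'])   # False
```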