Commit 7363674e authored by Jan Tattermusch

Merge pull request #6477 from jtattermusch/benchmarking_add_netperf

Add support for running netperf as part of benchmarks.
Parents: 8b382748, 6de6971b
@@ -50,7 +50,7 @@ gcloud compute instances create $INSTANCE_NAME \
     --machine-type $MACHINE_TYPE \
     --image ubuntu-15-10 \
     --boot-disk-size 300 \
-    --scope https://www.googleapis.com/auth/bigquery
+    --scopes https://www.googleapis.com/auth/bigquery

 echo 'Created GCE instance, waiting 60 seconds for it to come online.'
 sleep 60
...
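A note on the one-character fix above: the flag that gcloud compute instances create accepts is the plural --scopes, so the singular spelling broke the BigQuery scope setup for newly created worker instances.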
@@ -77,6 +77,9 @@ sudo apt-get install -y \
 # perftools
 sudo apt-get install -y google-perftools libgoogle-perftools-dev

+# netperf
+sudo apt-get install -y netperf
+
 # C++ dependencies
 sudo apt-get install -y libgflags-dev libgtest-dev libc++-dev clang
...
@@ -48,20 +48,47 @@ import big_query_utils

 _PROJECT_ID='grpc-testing'

-def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):
+def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):
+  with open(result_file, 'r') as f:
+    (col1, col2, col3) = f.read().split(',')
+    latency50 = float(col1.strip()) * 1000
+    latency90 = float(col2.strip()) * 1000
+    latency99 = float(col3.strip()) * 1000
+
+    scenario_result = {
+        'scenario': {
+          'name': 'netperf_tcp_rr'
+        },
+        'summary': {
+          'latency50': latency50,
+          'latency90': latency90,
+          'latency99': latency99
+        }
+    }
+
+    bq = big_query_utils.create_big_query()
+    _create_results_table(bq, dataset_id, table_id)
+
+    if not _insert_result(bq, dataset_id, table_id, scenario_result, flatten=False):
+      print 'Error uploading result to bigquery.'
+      sys.exit(1)
+
+
+def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):
   with open(result_file, 'r') as f:
     scenario_result = json.loads(f.read())

   bq = big_query_utils.create_big_query()
   _create_results_table(bq, dataset_id, table_id)

   if not _insert_result(bq, dataset_id, table_id, scenario_result):
     print 'Error uploading result to bigquery.'
     sys.exit(1)


-def _insert_result(bq, dataset_id, table_id, scenario_result):
-  _flatten_result_inplace(scenario_result)
+def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):
+  if flatten:
+    _flatten_result_inplace(scenario_result)
   _populate_metadata_inplace(scenario_result)
   row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
   return big_query_utils.insert_rows(bq,
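For context on the parsing in the new function: with -o P50_LATENCY,P90_LATENCY,P99_LATENCY, netperf emits a single comma-separated line of latency percentiles in microseconds, and the * 1000 presumably rescales them to the unit the scenario summaries use elsewhere. A minimal sketch of the same parse on a made-up sample line:

# Hypothetical netperf output line; the three values are invented.
csv_line = '42.50, 61.30, 98.70\n'
(col1, col2, col3) = csv_line.split(',')
latency50 = float(col1.strip()) * 1000  # -> 42500.0
latency90 = float(col2.strip()) * 1000  # -> 61300.0
latency99 = float(col3.strip()) * 1000  # -> 98700.0
print latency50, latency90, latency99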
@@ -127,9 +154,17 @@ argp.add_argument('--bq_result_table', required=True, default=None, type=str,
                   help='Bigquery "dataset.table" to upload results to.')
 argp.add_argument('--file_to_upload', default='scenario_result.json', type=str,
                   help='Report file to upload.')
+argp.add_argument('--file_format',
+                  choices=['scenario_result','netperf_latency_csv'],
+                  default='scenario_result',
+                  help='Format of the file to upload.')

 args = argp.parse_args()

 dataset_id, table_id = args.bq_result_table.split('.', 2)
-_upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload)
+
+if args.file_format == 'netperf_latency_csv':
+  _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, args.file_to_upload)
+else:
+  _upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload)
+
 print 'Successfully uploaded %s to BigQuery.\n' % args.file_to_upload
New file: tools/run_tests/performance/run_netperf.sh

#!/bin/bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
set -ex
cd $(dirname $0)/../../..
netperf >netperf_latency.txt -P 0 -t TCP_RR -H "$NETPERF_SERVER_HOST" -- -r 1,1 -o P50_LATENCY,P90_LATENCY,P99_LATENCY
cat netperf_latency.txt
if [ "$BQ_RESULT_TABLE" != "" ]
then
tools/run_tests/performance/bq_upload_result.py \
--file_to_upload=netperf_latency.txt \
--file_format=netperf_latency_csv \
--bq_result_table="$BQ_RESULT_TABLE"
fi
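A few notes on the netperf invocation in this script: -P 0 suppresses the banner so only the CSV line is printed, -t TCP_RR runs the TCP request/response test against $NETPERF_SERVER_HOST, and after the -- separator the test-specific -r 1,1 sets 1-byte requests and responses while -o selects the three latency-percentile output columns. A rough Python equivalent of the measurement step (the server name is a placeholder):

import subprocess

# Run the same TCP_RR test run_netperf.sh runs and capture the CSV line.
# 'perf-server' is a hypothetical host name.
cmd = ['netperf', '-P', '0', '-t', 'TCP_RR', '-H', 'perf-server',
       '--', '-r', '1,1', '-o', 'P50_LATENCY,P90_LATENCY,P99_LATENCY']
csv_line = subprocess.check_output(cmd)
print csv_line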
@@ -131,6 +131,25 @@ def create_quit_jobspec(workers, remote_host=None):
                           verbose_success=True)


+def create_netperf_jobspec(server_host='localhost', client_host=None,
+                           bq_result_table=None):
+  """Runs netperf benchmark."""
+  cmd = 'NETPERF_SERVER_HOST="%s" ' % server_host
+  if bq_result_table:
+    cmd += 'BQ_RESULT_TABLE="%s" ' % bq_result_table
+  cmd += 'tools/run_tests/performance/run_netperf.sh'
+  if client_host:
+    user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, client_host)
+    cmd = 'ssh %s "cd ~/performance_workspace/grpc/ && "%s' % (user_at_host, pipes.quote(cmd))
+
+  return jobset.JobSpec(
+      cmdline=[cmd],
+      shortname='netperf',
+      timeout_seconds=60,
+      shell=True,
+      verbose_success=True)
+
+
 def archive_repo(languages):
   """Archives local version of repo including submodules."""
   cmdline=['tar', '-cf', '../grpc.tar', '../grpc/']
...
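To make the quoting in create_netperf_jobspec concrete: for a local run the job is just the env-prefixed script, while with a client_host the whole command is wrapped in ssh, with pipes.quote protecting the inner quotes. A sketch with made-up hosts and username:

import pipes

# Hypothetical values; the username and both hosts are invented here.
cmd = 'NETPERF_SERVER_HOST="10.0.0.1" tools/run_tests/performance/run_netperf.sh'
user_at_host = 'jenkins@10.0.0.2'
print 'ssh %s "cd ~/performance_workspace/grpc/ && "%s' % (user_at_host, pipes.quote(cmd))
# -> ssh jenkins@10.0.0.2 "cd ~/performance_workspace/grpc/ && "'NETPERF_SERVER_HOST="10.0.0.1" tools/run_tests/performance/run_netperf.sh'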
@@ -244,12 +263,28 @@ def start_qpsworkers(languages, worker_hosts):

 def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
-                     category='all', bq_result_table=None):
+                     category='all', bq_result_table=None,
+                     netperf=False, netperf_hosts=[]):
   """Create jobspecs for scenarios to run."""
   all_workers = [worker
                  for workers in workers_by_lang.values()
                  for worker in workers]
   scenarios = []
+
+  if netperf:
+    if not netperf_hosts:
+      netperf_server='localhost'
+      netperf_client=None
+    elif len(netperf_hosts) == 1:
+      netperf_server=netperf_hosts[0]
+      netperf_client=netperf_hosts[0]
+    else:
+      netperf_server=netperf_hosts[0]
+      netperf_client=netperf_hosts[1]
+    scenarios.append(create_netperf_jobspec(server_host=netperf_server,
+                                            client_host=netperf_client,
+                                            bq_result_table=bq_result_table))
+
   for language in languages:
     for scenario_json in language.scenarios():
       if re.search(args.regex, scenario_json['name']):
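The branching on netperf_hosts gives the host selection a simple shape: no hosts means a loopback run, one host runs the netperf client and server on the same machine, and two hosts split the roles. A self-contained restatement of that mapping (host names hypothetical):

def _pick_netperf_hosts(netperf_hosts):
  # Mirrors the selection logic in create_scenarios above.
  if not netperf_hosts:
    return 'localhost', None
  elif len(netperf_hosts) == 1:
    return netperf_hosts[0], netperf_hosts[0]
  else:
    return netperf_hosts[0], netperf_hosts[1]

assert _pick_netperf_hosts([]) == ('localhost', None)
assert _pick_netperf_hosts(['host-a']) == ('host-a', 'host-a')
assert _pick_netperf_hosts(['host-a', 'host-b']) == ('host-a', 'host-b')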
@@ -316,6 +351,11 @@ argp.add_argument('--category',
                   choices=['smoketest','all'],
                   default='smoketest',
                   help='Select a category of tests to run. Smoketest runs by default.')
+argp.add_argument('--netperf',
+                  default=False,
+                  action='store_const',
+                  const=True,
+                  help='Run netperf benchmark as one of the scenarios.')

 args = argp.parse_args()
@@ -360,7 +400,10 @@ try:
                               remote_host=args.remote_driver_host,
                               regex=args.regex,
                               category=args.category,
-                              bq_result_table=args.bq_result_table)
+                              bq_result_table=args.bq_result_table,
+                              netperf=args.netperf,
+                              netperf_hosts=args.remote_worker_host)

   if not scenarios:
     raise Exception('No scenarios to run')
...
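Taken together: passing --netperf makes create_scenarios prepend a single 'netperf' jobspec to the regular scenario list, reusing the existing --remote_worker_host values to pick the netperf server and client and the existing --bq_result_table for the latency upload. So a driver invocation along the lines of run_performance_tests.py --netperf --remote_worker_host host-a host-b --bq_result_table my_dataset.netperf_results (hosts and table name are placeholders) benchmarks raw TCP_RR latency alongside the gRPC scenarios.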