Remove benchmarking-tests folder.

Remove references to removed folders.
Rodolphe Marques 2017-04-03 11:57:56 +02:00
parent 4bcd7dd1e2
commit cee2f94f89
7 changed files with 1 addition and 230 deletions

.gitattributes

@@ -1,11 +1,9 @@
-benchmarking-tests export-ignore
 deploy-cluster-aws export-ignore
 docs export-ignore
 ntools export-ignore
-speed-tests export-ignore
 tests export-ignore
 .gitattributes export-ignore
 .gitignore export-ignore
 .travis.yml export-ignore
 *.md export-ignore
-codecov.yml export-ignore
+codecov.yml export-ignore

.gitignore

@@ -71,8 +71,6 @@ deploy-cluster-aws/confiles/
 deploy-cluster-aws/client_confile
 deploy-cluster-aws/hostlist.py
 deploy-cluster-aws/ssh_key.py
-benchmarking-tests/hostlist.py
-benchmarking-tests/ssh_key.py
 
 # Ansible-specific files
 ntools/one-m/ansible/hosts

benchmarking-tests/README.md

@@ -1,3 +0,0 @@
# Benchmarking tests
This folder contains utility files and test-case folders for benchmarking the performance of a BigchainDB cluster.

benchmarking-tests/benchmark_utils.py

@@ -1,154 +0,0 @@
import multiprocessing as mp
import uuid
import argparse
import csv
import time
import logging

import rethinkdb as r

from bigchaindb.common.transaction import Transaction
from bigchaindb import Bigchain
from bigchaindb.utils import ProcessGroup
from bigchaindb.commands import utils

SIZE_OF_FILLER = {'minimal': 0,
                  'small': 10**3,
                  'medium': 10**4,
                  'large': 10**5}

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def create_write_transaction(tx_left, payload_filler):
    b = Bigchain()
    payload_dict = {}
    if payload_filler:
        payload_dict['filler'] = payload_filler
    while tx_left > 0:
        # Include a random uuid string in the payload
        # to prevent duplicate transactions
        # (i.e. transactions with the same hash)
        payload_dict['msg'] = str(uuid.uuid4())
        tx = Transaction.create([b.me], [b.me], payload=payload_dict)
        tx = tx.sign([b.me_private])
        b.write_transaction(tx)
        tx_left -= 1


def run_add_backlog(args):
    tx_left = args.num_transactions // mp.cpu_count()
    payload_filler = 'x' * SIZE_OF_FILLER[args.payload_size]
    workers = ProcessGroup(target=create_write_transaction,
                           args=(tx_left, payload_filler))
    workers.start()


def run_gather_metrics(args):
    # setup a rethinkdb connection
    conn = r.connect(args.bigchaindb_host, 28015, 'bigchain')

    # setup csv writer
    csv_file = open(args.csvfile, 'w')
    csv_writer = csv.writer(csv_file)

    # query for the number of transactions on the backlog
    num_transactions = r.table('backlog').count().run(conn)
    num_transactions_received = 0
    initial_time = None

    logger.info('Starting gathering metrics.')
    logger.info('{} transactions in the backlog'.format(num_transactions))
    logger.info('This process should exit automatically. '
                'If this does not happen you can exit at any time using Ctrl-C '
                'saving all the metrics gathered up to this point.')

    logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format(
        'timestamp',
        'tx in block',
        'tx/s',
        '% complete'
    ))

    # listen to the changefeed
    try:
        for change in r.table('bigchain').changes().run(conn):
            # check only for new blocks
            if change['old_val'] is None:
                block_num_transactions = len(
                    change['new_val']['block']['transactions']
                )
                time_now = time.time()
                csv_writer.writerow(
                    [str(time_now), str(block_num_transactions)]
                )

                # log statistics
                if initial_time is None:
                    initial_time = time_now

                num_transactions_received += block_num_transactions
                elapsed_time = time_now - initial_time
                percent_complete = round(
                    (num_transactions_received / num_transactions) * 100
                )

                if elapsed_time != 0:
                    transactions_per_second = round(
                        num_transactions_received / elapsed_time
                    )
                else:
                    transactions_per_second = float('nan')

                logger.info('\t{:<20} {:<20} {:<20} {:<20}'.format(
                    time_now,
                    block_num_transactions,
                    transactions_per_second,
                    percent_complete
                ))

                if (num_transactions - num_transactions_received) == 0:
                    break
    except KeyboardInterrupt:
        logger.info('Interrupted. Exiting early...')
    finally:
        # close files
        csv_file.close()


def main():
    parser = argparse.ArgumentParser(description='BigchainDB benchmarking utils')
    subparsers = parser.add_subparsers(title='Commands', dest='command')

    # add transactions to backlog
    backlog_parser = subparsers.add_parser('add-backlog',
                                           help='Add transactions to the backlog')
    backlog_parser.add_argument('num_transactions',
                                metavar='num_transactions',
                                type=int, default=0,
                                help='Number of transactions to add to the backlog')
    backlog_parser.add_argument('-s', '--payload-size',
                                choices=SIZE_OF_FILLER.keys(),
                                default='minimal',
                                help='Payload size')

    # metrics
    metrics_parser = subparsers.add_parser('gather-metrics',
                                           help='Gather metrics to a csv file')
    metrics_parser.add_argument('-b', '--bigchaindb-host',
                                required=True,
                                help=('Bigchaindb node hostname to connect '
                                      'to gather cluster metrics'))
    metrics_parser.add_argument('-c', '--csvfile',
                                required=True,
                                help='Filename to save the metrics')

    utils.start(parser, globals())


if __name__ == '__main__':
    main()
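
For reference, the argparse setup above implies an invocation like the following for filling the backlog on a node; the transaction count and payload size are just examples:

```bash
# Add 10000 transactions to the backlog, each padded with a ~1 kB filler
python3 benchmark_utils.py add-backlog 10000 --payload-size small
```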

benchmarking-tests/fabfile.py

@@ -1,46 +0,0 @@
from __future__ import with_statement, unicode_literals

from fabric.api import sudo, env, hosts
from fabric.api import task, parallel
from fabric.contrib.files import sed
from fabric.operations import run, put
from fabric.context_managers import settings

from hostlist import public_dns_names
from ssh_key import ssh_key_path

# Ignore known_hosts
# http://docs.fabfile.org/en/1.10/usage/env.html#disable-known-hosts
env.disable_known_hosts = True

# What remote servers should Fabric connect to? With what usernames?
env.user = 'ubuntu'
env.hosts = public_dns_names

# SSH key files to try when connecting:
# http://docs.fabfile.org/en/1.10/usage/env.html#key-filename
env.key_filename = ssh_key_path


@task
@parallel
def put_benchmark_utils():
    put('benchmark_utils.py')


@task
@parallel
def prepare_backlog(num_transactions=10000):
    run('python3 benchmark_utils.py add-backlog {}'.format(num_transactions))


@task
@parallel
def start_bigchaindb():
    run('screen -d -m bigchaindb start &', pty=False)


@task
@parallel
def kill_bigchaindb():
    run('killall bigchaindb')


@@ -1,20 +0,0 @@
# Transactions per second
Measure how many blocks per second are created on the _bigchain_ with a pre-filled backlog.
1. Deploy an AWS cluster: https://docs.bigchaindb.com/projects/server/en/latest/clusters-feds/aws-testing-cluster.html
2. Make a symbolic link to hostlist.py: `ln -s ../deploy-cluster-aws/hostlist.py .`
3. Make a symbolic link to bigchaindb.pem:
```bash
mkdir pem
cd pem
ln -s ../deploy-cluster-aws/pem/bigchaindb.pem .
```
Then:
```bash
fab put_benchmark_utils
fab prepare_backlog:<num txs per node> # wait for process to finish
fab start_bigchaindb
```
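
Once the nodes are running, results can be collected with the `gather-metrics` subcommand defined in `benchmark_utils.py` above; the node hostname and output filename below are placeholders:

```bash
# Follow the changefeed on one node, writing per-block stats until the backlog drains
python3 benchmark_utils.py gather-metrics -b node0.example.com -c metrics.csv
```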

codecov.yml

@@ -29,8 +29,6 @@ coverage:
   - "docs/*"
   - "tests/*"
   - "bigchaindb/version.py"
-  - "benchmarking-tests/*"
-  - "speed-tests/*"
   - "ntools/*"
   - "k8s/*"