2018-08-16 12:31:32 +02:00
|
|
|
# Copyright BigchainDB GmbH and BigchainDB contributors
|
|
|
|
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
|
|
|
|
# Code is Apache-2.0 and docs are CC-BY-4.0
|
|
|
|
|
2016-04-07 14:31:34 +02:00
|
|
|
import json
|
2018-08-28 14:21:59 +02:00
|
|
|
import logging
|
2018-04-30 14:43:39 +02:00
|
|
|
|
2016-04-26 00:37:35 +02:00
|
|
|
from unittest.mock import Mock, patch
|
2017-02-24 13:44:50 +01:00
|
|
|
from argparse import Namespace
|
2016-02-14 17:18:41 +01:00
|
|
|
|
|
|
|
import pytest
|
2016-02-14 21:51:40 +01:00
|
|
|
|
2018-08-24 09:52:00 +02:00
|
|
|
from bigchaindb import ValidatorElection
|
|
|
|
from tests.conftest import node_keys
|
|
|
|
|
2016-02-14 21:51:40 +01:00
|
|
|
|
2016-09-21 00:46:48 +02:00
|
|
|
def test_make_sure_we_dont_remove_any_command():
    """Regression guard: every known CLI subcommand must still parse.

    Thanks to: http://stackoverflow.com/a/18161115/597097
    """
    from bigchaindb.commands.bigchaindb import create_parser

    parser = create_parser()

    # Each entry is one full command line that must yield a truthy `command`.
    command_lines = [
        ['configure', 'localmongodb'],
        ['show-config'],
        ['init'],
        ['drop'],
        ['start'],
        ['upsert-validator', 'new', 'TEMP_PUB_KEYPAIR', '10', 'TEMP_NODE_ID',
         '--private-key', 'TEMP_PATH_TO_PRIVATE_KEY'],
        ['upsert-validator', 'approve', 'ELECTION_ID', '--private-key',
         'TEMP_PATH_TO_PRIVATE_KEY'],
        ['upsert-validator', 'show', 'ELECTION_ID'],
    ]
    for argv in command_lines:
        assert parser.parse_args(argv).command
|
2016-09-21 00:46:48 +02:00
|
|
|
|
|
|
|
|
|
|
|
@patch('bigchaindb.commands.utils.start')
def test_main_entrypoint(mock_start):
    """The `main` entry point must hand control over to the command runner."""
    from bigchaindb.commands.bigchaindb import main

    main()

    # utils.start is the dispatcher; main() must have invoked it.
    mock_start.assert_called()
|
|
|
|
|
|
|
|
|
2018-09-10 09:53:18 +02:00
|
|
|
@patch('bigchaindb.log.setup_logging')
@patch('bigchaindb.commands.bigchaindb._run_init')
@patch('bigchaindb.config_utils.autoconfigure')
def test_bigchain_run_start(mock_setup_logging, mock_run_init,
                            mock_autoconfigure, mock_processes_start):
    """`run_start` with --yes should configure, init and set up logging.

    NOTE(review): @patch decorators inject mocks bottom-up, so the first
    parameter actually receives the `autoconfigure` mock, not the
    `setup_logging` one — the parameter names are shifted relative to the
    decorators. Also there are four mock parameters but only three visible
    decorators; presumably a fourth @patch (e.g. of 'bigchaindb.start.start')
    belongs above — TODO confirm against the repository. Left untouched here.
    """
    from bigchaindb.commands.bigchaindb import run_start
    args = Namespace(config=None, yes=True,
                     skip_initialize_database=False)
    run_start(args)
    # Because of the bottom-up injection noted above, this actually checks
    # that `autoconfigure` was called.
    assert mock_setup_logging.called
|
2016-04-26 00:37:35 +02:00
|
|
|
|
|
|
|
|
2016-02-14 19:46:30 +01:00
|
|
|
# TODO Please beware, that if debugging, the "-s" switch for pytest will
# interfere with capsys.
# See related issue: https://github.com/pytest-dev/pytest/issues/128
@pytest.mark.usefixtures('ignore_local_config_file')
def test_bigchain_show_config(capsys):
    """`show-config` must print the effective configuration as JSON."""
    from bigchaindb.commands.bigchaindb import run_show_config

    args = Namespace(config=None)
    # Drain anything already captured so we only read run_show_config's output.
    _, _ = capsys.readouterr()
    run_show_config(args)
    stdout_text = capsys.readouterr()[0]
    output_config = json.loads(stdout_text)
    # Note: This test passed previously because we were always
    # using the default configuration parameters, but since we
    # are running with docker-compose now and expose parameters like
    # BIGCHAINDB_SERVER_BIND, BIGCHAINDB_WSSERVER_HOST, BIGCHAINDB_WSSERVER_ADVERTISED_HOST
    # the default comparison fails i.e. when config is imported at the beginning the
    # dict returned is different that what is expected after run_show_config
    # and run_show_config updates the bigchaindb.config
    from bigchaindb import config
    del config['CONFIGURED']
    assert output_config == config
|
2016-02-14 20:05:16 +01:00
|
|
|
|
|
|
|
|
2017-04-12 16:38:18 +02:00
|
|
|
def test_bigchain_run_init_when_db_exists(mocker, capsys):
    """`init` against an existing database prints a hint instead of failing.

    Fix: removed a stray debug `print(output_message)` that polluted the
    pytest output and served no purpose in the assertion.
    """
    from bigchaindb.commands.bigchaindb import run_init
    from bigchaindb.common.exceptions import DatabaseAlreadyExists

    init_db_mock = mocker.patch(
        'bigchaindb.commands.bigchaindb.schema.init_database',
        autospec=True,
        spec_set=True,
    )
    init_db_mock.side_effect = DatabaseAlreadyExists

    args = Namespace(config=None)
    run_init(args)

    # The warning is written to stderr (index 1 of the (out, err) tuple).
    output_message = capsys.readouterr()[1]
    assert output_message == (
        'The database already exists.\n'
        'If you wish to re-initialize it, first drop it.\n'
    )
|
2016-02-14 20:30:50 +01:00
|
|
|
|
|
|
|
|
2017-04-12 16:12:41 +02:00
|
|
|
def test__run_init(mocker):
    """`_run_init` builds one BigchainDB instance and initializes the schema
    against that instance's connection."""
    from bigchaindb.commands.bigchaindb import _run_init

    bigchain_mock = mocker.patch(
        'bigchaindb.commands.bigchaindb.bigchaindb.BigchainDB')
    init_db_mock = mocker.patch(
        'bigchaindb.commands.bigchaindb.schema.init_database',
        autospec=True,
        spec_set=True,
    )

    _run_init()

    # Exactly one instance was created with no arguments...
    bigchain_mock.assert_called_once_with()
    # ...and the schema init was wired to that instance's connection.
    init_db_mock.assert_called_once_with(
        connection=bigchain_mock.return_value.connection)
|
|
|
|
|
|
|
|
|
2016-12-06 14:22:13 +01:00
|
|
|
@patch('bigchaindb.backend.schema.drop_database')
def test_drop_db_when_assumed_yes(mock_db_drop):
    """With --yes, `drop` removes the database without prompting."""
    from bigchaindb.commands.bigchaindb import run_drop

    run_drop(Namespace(config=None, yes=True))

    assert mock_db_drop.called
|
|
|
|
|
|
|
|
|
2016-12-06 14:22:13 +01:00
|
|
|
@patch('bigchaindb.backend.schema.drop_database')
def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch):
    """Answering 'y' at the interactive prompt drops the database."""
    from bigchaindb.commands.bigchaindb import run_drop

    # Simulate the operator typing 'y' at the confirmation prompt.
    monkeypatch.setattr(
        'bigchaindb.commands.bigchaindb.input_on_stderr', lambda x: 'y')
    run_drop(Namespace(config=None, yes=False))

    assert mock_db_drop.called
|
|
|
|
|
|
|
|
|
2017-04-02 14:23:39 +02:00
|
|
|
@patch('bigchaindb.backend.schema.drop_database')
def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys):
    """Dropping a missing database reports the problem on stderr."""
    from bigchaindb import config
    from bigchaindb.commands.bigchaindb import run_drop
    from bigchaindb.common.exceptions import DatabaseDoesNotExist

    mock_db_drop.side_effect = DatabaseDoesNotExist

    run_drop(Namespace(config=None, yes=True))

    output_message = capsys.readouterr()[1]
    expected = "Cannot drop '{name}'. The database does not exist.\n".format(
        name=config['database']['name'])
    assert output_message == expected
|
2017-04-02 14:23:39 +02:00
|
|
|
|
|
|
|
|
2016-12-06 14:22:13 +01:00
|
|
|
@patch('bigchaindb.backend.schema.drop_database')
def test_drop_db_does_not_drop_when_interactive_no(mock_db_drop, monkeypatch):
    """Answering 'n' at the prompt leaves the database untouched."""
    from bigchaindb.commands.bigchaindb import run_drop

    # Simulate the operator declining the confirmation prompt.
    monkeypatch.setattr(
        'bigchaindb.commands.bigchaindb.input_on_stderr', lambda x: 'n')

    run_drop(Namespace(config=None, yes=False))

    assert not mock_db_drop.called
|
2016-02-14 20:48:30 +01:00
|
|
|
|
|
|
|
|
2016-02-14 21:51:40 +01:00
|
|
|
# TODO Beware if you are putting breakpoints in there, and using the '-s'
# switch with pytest. It will just hang. Seems related to the monkeypatching of
# input_on_stderr.
def test_run_configure_when_config_does_not_exist(monkeypatch,
                                                  mock_write_config,
                                                  mock_generate_key_pair,
                                                  mock_bigchaindb_backup_config):
    """With no config file on disk and --yes, `configure` completes and
    returns None."""
    from bigchaindb.commands.bigchaindb import run_configure

    # Pretend no config file exists and auto-accept any prompt.
    monkeypatch.setattr('os.path.exists', lambda path: False)
    monkeypatch.setattr('builtins.input', lambda: '\n')

    args = Namespace(config=None, backend='localmongodb', yes=True)
    assert run_configure(args) is None
|
2016-02-29 14:28:02 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_run_configure_when_config_does_exist(monkeypatch,
                                              mock_write_config,
                                              mock_generate_key_pair,
                                              mock_bigchaindb_backup_config):
    """If a config file already exists (and no --yes), nothing is written."""
    from bigchaindb.commands.bigchaindb import run_configure

    written = {}

    # Recording spy: captures any attempted config write.
    def record_write(newconfig):
        written['return'] = newconfig

    monkeypatch.setattr('os.path.exists', lambda path: True)
    monkeypatch.setattr('builtins.input', lambda: '\n')
    monkeypatch.setattr(
        'bigchaindb.config_utils.write_config', record_write)

    run_configure(Namespace(config=None, yes=None))

    # write_config was never invoked, so the recorder stayed empty.
    assert written == {}
|
2016-04-26 00:37:35 +02:00
|
|
|
|
|
|
|
|
2018-04-30 14:43:39 +02:00
|
|
|
@pytest.mark.skip
@pytest.mark.parametrize('backend', (
    'localmongodb',
))
def test_run_configure_with_backend(backend, monkeypatch, mock_write_config):
    """(currently skipped) `configure <backend>` writes a config whose
    database section matches that backend's defaults."""
    import bigchaindb
    from bigchaindb.commands.bigchaindb import run_configure

    captured = {}

    # Capture the config that run_configure would have written to disk.
    def capture_write(new_config, filename=None):
        captured['return'] = new_config

    monkeypatch.setattr('os.path.exists', lambda path: False)
    monkeypatch.setattr('builtins.input', lambda: '\n')
    monkeypatch.setattr('bigchaindb.config_utils.write_config',
                        capture_write)

    args = Namespace(config=None, backend=backend, yes=True)
    expected_config = bigchaindb.config
    run_configure(args)

    # update the expected config with the correct backend and keypair
    backend_conf = getattr(bigchaindb, '_database_' + backend)
    expected_config.update({'database': backend_conf,
                            'keypair': captured['return']['keypair']})

    assert captured['return'] == expected_config
|
|
|
|
|
|
|
|
|
2017-03-13 17:55:11 +01:00
|
|
|
def test_run_start_when_db_already_exists(mocker,
                                          monkeypatch,
                                          run_start_args,
                                          mocked_setup_logging):
    """`start` tolerates an already-initialized database and still starts."""
    from bigchaindb.commands.bigchaindb import run_start
    from bigchaindb.common.exceptions import DatabaseAlreadyExists

    mocked_start = mocker.patch('bigchaindb.start.start')

    # Make the init step report that the database is already there.
    def raise_already_exists():
        raise DatabaseAlreadyExists()

    monkeypatch.setattr('builtins.input', lambda: '\x03')
    monkeypatch.setattr(
        'bigchaindb.commands.bigchaindb._run_init', raise_already_exists)

    run_start(run_start_args)

    # The exception must be swallowed and the node started anyway.
    assert mocked_start.called
|
|
|
|
|
|
|
|
|
2016-09-29 10:29:41 +02:00
|
|
|
@patch('bigchaindb.commands.utils.start')
def test_calling_main(start_mock, monkeypatch):
    """`main` builds the full argparse tree and delegates to utils.start."""
    from bigchaindb.commands.bigchaindb import main

    # Fake the whole argparse machinery so we can inspect how it is used.
    subsubparsers = Mock()
    subparsers = Mock()
    subparsers.add_parser.return_value = subsubparsers
    parser = Mock()
    parser.add_subparsers.return_value = subparsers
    argparser_mock = Mock(return_value=parser)
    monkeypatch.setattr('argparse.ArgumentParser', argparser_mock)

    main()

    assert argparser_mock.called is True
    parser.add_subparsers.assert_called_with(title='Commands',
                                             dest='command')
    # Every top-level subcommand must be registered with its help text.
    subparsers.add_parser.assert_any_call('configure',
                                          help='Prepare the config file.')
    subparsers.add_parser.assert_any_call('show-config',
                                          help='Show the current '
                                               'configuration')
    subparsers.add_parser.assert_any_call('init', help='Init the database')
    subparsers.add_parser.assert_any_call('drop', help='Drop the database')
    subparsers.add_parser.assert_any_call('start', help='Start BigchainDB')

    assert start_mock.called is True
|
2017-01-25 12:36:08 +01:00
|
|
|
|
|
|
|
|
2018-02-21 10:50:12 +01:00
|
|
|
@patch('bigchaindb.commands.bigchaindb.run_recover')
@patch('bigchaindb.start.start')
def test_recover_db_on_start(mock_start,
                             mock_run_recover,
                             mocked_setup_logging):
    """On startup, crash recovery must run and the node must start.

    Fix: @patch decorators inject mocks bottom-up, so the first positional
    parameter receives the mock for the *last* decorator
    ('bigchaindb.start.start'). The parameter names were previously swapped;
    since both mocks are asserted the test outcome is unchanged, but each
    name now matches the mock it actually holds.
    """
    from bigchaindb.commands.bigchaindb import run_start
    args = Namespace(config=None, yes=True,
                     skip_initialize_database=False)
    run_start(args)

    assert mock_run_recover.called
    assert mock_start.called
|
|
|
|
|
|
|
|
|
2018-04-18 10:46:16 +02:00
|
|
|
@pytest.mark.bdb
def test_run_recover(b, alice, bob):
    """`run_recover` discards data from a pre-commit round that is ahead of
    the last committed block: tx2 appears only in the uncommitted height-10
    round, so it must be gone afterwards, while committed state remains.
    """
    from bigchaindb.commands.bigchaindb import run_recover
    from bigchaindb.models import Transaction
    from bigchaindb.lib import Block, PreCommitState
    from bigchaindb.backend.query import PRE_COMMIT_ID
    from bigchaindb.backend import query

    # Two independent CREATE transactions, one per keypair.
    tx1 = Transaction.create([alice.public_key],
                             [([alice.public_key], 1)],
                             asset={'cycle': 'hero'},
                             metadata={'name': 'hohenheim'}) \
        .sign([alice.private_key])
    tx2 = Transaction.create([bob.public_key],
                             [([bob.public_key], 1)],
                             asset={'cycle': 'hero'},
                             metadata={'name': 'hohenheim'}) \
        .sign([bob.private_key])

    # store the transactions
    b.store_bulk_transactions([tx1, tx2])

    # create a random block
    block8 = Block(app_hash='random_app_hash1', height=8,
                   transactions=['txid_doesnt_matter'])._asdict()
    b.store_block(block8)

    # create the next block
    block9 = Block(app_hash='random_app_hash1', height=9,
                   transactions=[tx1.id])._asdict()
    b.store_block(block9)

    # create a pre_commit state which is ahead of the commit state
    pre_commit_state = PreCommitState(commit_id=PRE_COMMIT_ID, height=10,
                                      transactions=[tx2.id])._asdict()
    b.store_pre_commit_state(pre_commit_state)

    run_recover(b)

    # tx2 was only in the uncommitted pre-commit round, so recovery must
    # have removed it from storage.
    assert not query.get_transaction(b.connection, tx2.id)
|
|
|
|
|
|
|
|
|
2018-02-21 10:50:12 +01:00
|
|
|
# Helper
class MockResponse():
    """Minimal stand-in for an HTTP status response from a Tendermint node.

    Only mimics the one piece of the interface the tests consume: a `json()`
    method reporting the latest block height.
    """

    def __init__(self, height):
        # Latest block height this fake node reports.
        self.height = height

    def json(self):
        """Return the status payload shape the real endpoint produces."""
        payload = {'latest_block_height': self.height}
        return {'result': payload}
|
2018-03-29 18:25:26 +02:00
|
|
|
|
|
|
|
|
2018-08-30 10:47:37 +02:00
|
|
|
@pytest.mark.abci
def test_upsert_validator_new_with_tendermint(b, priv_validator_path, user_sk, validators):
    """An `upsert-validator new` election submitted through Tendermint ends
    up committed and retrievable."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_new

    election_args = Namespace(action='new',
                              public_key='HHG0IQRybpT6nJMIWWFWhMczCLHt6xcm7eP52GnGuPY=',
                              power=1,
                              node_id='unique_node_id_for_test_upsert_validator_new_with_tendermint',
                              sk=priv_validator_path,
                              config={})

    election_id = run_upsert_validator_new(election_args, b)

    # The election transaction must be committed and queryable.
    assert b.get_transaction(election_id)
|
2018-08-09 17:29:21 +02:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.bdb
def test_upsert_validator_new_without_tendermint(caplog, b, priv_validator_path, user_sk):
    """Without a real Tendermint, a new election is stored directly and a
    success message is logged with the election id."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_new

    # Persist directly and pretend Tendermint accepted the tx (HTTP 202).
    def accept_write(tx, mode):
        b.store_bulk_transactions([tx])
        return (202, '')

    b.get_validators = mock_get_validators
    b.write_transaction = accept_write

    args = Namespace(action='new',
                     public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=',
                     power=1,
                     node_id='fb7140f03a4ffad899fabbbf655b97e0321add66',
                     sk=priv_validator_path,
                     config={})

    with caplog.at_level(logging.INFO):
        election_id = run_upsert_validator_new(args, b)
        assert caplog.records[0].msg == '[SUCCESS] Submitted proposal with id: ' + election_id
        assert b.get_transaction(election_id)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.bdb
def test_upsert_validator_new_invalid_election(caplog, b, priv_validator_path, user_sk):
    """A nonexistent signing-key path must fail and log the
    FileNotFoundError."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_new

    args = Namespace(action='new',
                     public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=',
                     power=10,
                     node_id='fb7140f03a4ffad899fabbbf655b97e0321add66',
                     sk='/tmp/invalid/path/key.json',
                     config={})

    with caplog.at_level(logging.ERROR):
        # The command returns a falsy value and logs the underlying error.
        assert not run_upsert_validator_new(args, b)
        assert caplog.records[0].msg.__class__ == FileNotFoundError
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.bdb
def test_upsert_validator_new_election_invalid_power(caplog, b, priv_validator_path, user_sk):
    """A power change the election rules reject must fail with
    InvalidPowerChange logged."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_new
    from bigchaindb.common.exceptions import InvalidPowerChange

    # Store directly but report a 400 (rejected) response.
    def reject_write(tx, mode):
        b.store_bulk_transactions([tx])
        return (400, '')

    b.write_transaction = reject_write
    b.get_validators = mock_get_validators

    args = Namespace(action='new',
                     public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=',
                     power=10,
                     node_id='fb7140f03a4ffad899fabbbf655b97e0321add66',
                     sk=priv_validator_path,
                     config={})

    with caplog.at_level(logging.ERROR):
        assert not run_upsert_validator_new(args, b)
        assert caplog.records[0].msg.__class__ == InvalidPowerChange
|
2018-08-24 09:52:00 +02:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.abci
def test_upsert_validator_approve_with_tendermint(b, priv_validator_path, user_sk, validators):
    """An election created through Tendermint can then be approved, and the
    approval transaction is committed."""
    from bigchaindb.commands.bigchaindb import (run_upsert_validator_new,
                                                run_upsert_validator_approve)

    public_key = 'CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg='
    new_args = Namespace(action='new',
                         public_key=public_key,
                         power=1,
                         node_id='fb7140f03a4ffad899fabbbf655b97e0321add66',
                         sk=priv_validator_path,
                         config={})

    election_id = run_upsert_validator_new(new_args, b)
    assert election_id

    approve_args = Namespace(action='approve',
                             election_id=election_id,
                             sk=priv_validator_path,
                             config={})
    approve = run_upsert_validator_approve(approve_args, b)

    # The approval vote must be committed and queryable.
    assert b.get_transaction(approve)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.bdb
def test_upsert_validator_approve_without_tendermint(caplog, b, priv_validator_path, new_validator, node_key):
    """Approving a locally stored election succeeds and logs the vote."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_approve
    from argparse import Namespace

    b, election_id = call_election(b, new_validator, node_key)

    # call run_upsert_validator_approve with args that point to the election
    args = Namespace(action='approve',
                     election_id=election_id,
                     sk=priv_validator_path,
                     config={})

    # assert returned id is in the db
    with caplog.at_level(logging.INFO):
        approval_id = run_upsert_validator_approve(args, b)
        assert caplog.records[0].msg == '[SUCCESS] Your vote has been submitted'
        assert b.get_transaction(approval_id)
|
2018-08-24 09:52:00 +02:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.bdb
def test_upsert_validator_approve_failure(caplog, b, priv_validator_path, new_validator, node_key):
    """If the vote transaction is rejected (HTTP 400), the command returns
    falsy and logs the failure."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_approve
    from argparse import Namespace

    b, election_id = call_election(b, new_validator, node_key)

    # Store directly but report a 400 (rejected) response.
    def reject_write(tx, mode):
        b.store_bulk_transactions([tx])
        return (400, '')

    b.write_transaction = reject_write

    # call run_upsert_validator_approve with args that point to the election
    args = Namespace(action='approve',
                     election_id=election_id,
                     sk=priv_validator_path,
                     config={})

    with caplog.at_level(logging.ERROR):
        assert not run_upsert_validator_approve(args, b)
        assert caplog.records[0].msg == 'Failed to commit vote'
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.bdb
def test_upsert_validator_approve_called_with_bad_key(caplog, b, bad_validator_path, new_validator, node_key):
    """A signing key that isn't one of the eligible voters must be rejected
    with a clear error message."""
    from bigchaindb.commands.bigchaindb import run_upsert_validator_approve
    from argparse import Namespace

    b, election_id = call_election(b, new_validator, node_key)

    # Point at the real election, but sign with an ineligible key.
    args = Namespace(action='approve',
                     election_id=election_id,
                     sk=bad_validator_path,
                     config={})

    with caplog.at_level(logging.ERROR):
        assert not run_upsert_validator_approve(args, b)
        assert caplog.records[0].msg == 'The key you provided does not match any of '\
                                        'the eligible voters in this election.'
|
2018-08-24 09:52:00 +02:00
|
|
|
|
|
|
|
|
2018-08-30 10:47:37 +02:00
|
|
|
def mock_get_validators(height):
    """Stand-in for `BigchainDB.get_validators`: a single validator with
    voting power 10, keyed by the first conftest node key.

    The `height` argument is accepted to match the real signature but ignored.
    """
    pub_key = next(iter(node_keys()))
    return [
        {'public_key': {'value': pub_key,
                        'type': 'ed25519-base64'},
         'voting_power': 10}
    ]
|
|
|
|
|
|
|
|
|
|
|
|
def call_election(b, new_validator, node_key):
    """Seed `b` with a signed validator election and return (b, election_id).

    Patches `b` so that writes are stored locally and the validator set is
    the single mocked validator, then generates and stores an election whose
    only eligible voter is `node_key`.
    """

    # Store directly and mimic a Tendermint 202 (accepted) response.
    def accept_write(tx, mode):
        b.store_bulk_transactions([tx])
        return (202, '')

    # patch the validator set. We now have one validator with power 10
    b.get_validators = mock_get_validators
    b.write_transaction = accept_write

    # our voters is a list of length 1, populated from our mocked validator
    voters = ValidatorElection.recipients(b)
    # and our voter is the public key from the voter list
    voter = node_key.public_key
    valid_election = ValidatorElection.generate([voter],
                                                voters,
                                                new_validator, None).sign([node_key.private_key])

    # patch in an election with a vote issued to the user
    election_id = valid_election.id
    b.store_bulk_transactions([valid_election])

    return b, election_id
|