1
0
mirror of https://github.com/bigchaindb/bigchaindb.git synced 2024-06-28 00:27:45 +02:00

Merge pull request #1460 from bigchaindb/feat/1275/asset-decoupling

Feat/1275/asset decoupling
This commit is contained in:
Rodolphe Marques 2017-05-29 12:57:11 +02:00 committed by GitHub
commit 45a10a2577
16 changed files with 486 additions and 42 deletions

View File

@ -7,7 +7,7 @@ from pymongo import ReturnDocument
from bigchaindb import backend
from bigchaindb.common.exceptions import CyclicBlockchainError
from bigchaindb.common.transaction import Transaction
from bigchaindb.backend.exceptions import DuplicateKeyError
from bigchaindb.backend.exceptions import DuplicateKeyError, OperationError
from bigchaindb.backend.utils import module_dispatch_registrar
from bigchaindb.backend.mongodb.connection import MongoDBConnection
@ -127,6 +127,7 @@ def get_txids_filtered(conn, asset_id, operation=None):
return (elem['block']['transactions']['id'] for elem in cursor)
# TODO: This doesn't seem to be used anywhere
@register_query(MongoDBConnection)
def get_asset_by_id(conn, asset_id):
cursor = conn.run(
@ -232,10 +233,10 @@ def get_votes_by_block_id_and_voter(conn, block_id, node_pubkey):
@register_query(MongoDBConnection)
def write_block(conn, block):
def write_block(conn, block_dict):
return conn.run(
conn.collection('bigchain')
.insert_one(block.to_dict()))
.insert_one(block_dict))
@register_query(MongoDBConnection)
@ -246,6 +247,31 @@ def get_block(conn, block_id):
projection={'_id': False}))
@register_query(MongoDBConnection)
def write_assets(conn, assets):
try:
# unordered means that all the inserts will be attempted instead of
# stopping after the first error.
return conn.run(
conn.collection('assets')
.insert_many(assets, ordered=False))
# This can happen if we try to write the same asset multiple times.
# One case is when we write the same transaction into multiple blocks due
# to invalid blocks.
# The actual mongodb exception is a BulkWriteError due to a duplicated key
# in one of the inserts.
except OperationError:
return
@register_query(MongoDBConnection)
def get_assets(conn, asset_ids):
return conn.run(
conn.collection('assets')
.find({'id': {'$in': asset_ids}},
projection={'_id': False}))
@register_query(MongoDBConnection)
def count_blocks(conn):
return conn.run(
@ -278,7 +304,7 @@ def get_genesis_block(conn):
@register_query(MongoDBConnection)
def get_last_voted_block(conn, node_pubkey):
def get_last_voted_block_id(conn, node_pubkey):
last_voted = conn.run(
conn.collection('votes')
.find({'node_pubkey': node_pubkey},
@ -287,7 +313,7 @@ def get_last_voted_block(conn, node_pubkey):
# pymongo seems to return a cursor even if there are no results
# so we actually need to check the count
if last_voted.count() == 0:
return get_genesis_block(conn)
return get_genesis_block(conn)['id']
mapping = {v['vote']['previous_block']: v['vote']['voting_for_block']
for v in last_voted}
@ -305,7 +331,7 @@ def get_last_voted_block(conn, node_pubkey):
except KeyError:
break
return get_block(conn, last_block_id)
return last_block_id
@register_query(MongoDBConnection)

View File

@ -27,7 +27,7 @@ def create_database(conn, dbname):
@register_schema(MongoDBConnection)
def create_tables(conn, dbname):
for table_name in ['bigchain', 'backlog', 'votes']:
for table_name in ['bigchain', 'backlog', 'votes', 'assets']:
logger.info('Create `%s` table.', table_name)
# create the table
# TODO: read and write concerns can be declared here
@ -39,6 +39,7 @@ def create_indexes(conn, dbname):
create_bigchain_secondary_index(conn, dbname)
create_backlog_secondary_index(conn, dbname)
create_votes_secondary_index(conn, dbname)
create_assets_secondary_index(conn, dbname)
@register_schema(MongoDBConnection)
@ -102,3 +103,13 @@ def create_votes_secondary_index(conn, dbname):
ASCENDING)],
name='block_and_voter',
unique=True)
def create_assets_secondary_index(conn, dbname):
logger.info('Create `assets` secondary index.')
# unique index on the id of the asset.
# the id is the txid of the transaction that created the asset
conn.conn[dbname]['assets'].create_index('id',
name='asset_id',
unique=True)

View File

@ -239,6 +239,33 @@ def get_block(connection, block_id):
raise NotImplementedError
@singledispatch
def write_assets(connection, assets):
"""Write a list of assets to the assets table.
Args:
assets (list): a list of assets to write.
Returns:
The database response.
"""
raise NotImplementedError
@singledispatch
def get_assets(connection, asset_ids):
"""Get a list of assets from the assets table.
Args:
asset_ids (list): a list of ids for the assets to be retrieved from
the database.
Returns:
assets (list): the list of returned assets.
"""
raise NotImplementedError
@singledispatch
def count_blocks(connection):
"""Count the number of blocks in the bigchain table.
@ -287,15 +314,15 @@ def get_genesis_block(connection):
@singledispatch
def get_last_voted_block(connection, node_pubkey):
def get_last_voted_block_id(connection, node_pubkey):
"""Get the last voted block for a specific node.
Args:
node_pubkey (str): base58 encoded public key.
Returns:
The last block the node has voted on. If the node didn't cast
any vote then the genesis block is returned.
The id of the last block the node has voted on. If the node didn't cast
any vote then the genesis block id is returned.
"""
raise NotImplementedError

View File

@ -6,6 +6,7 @@ import rethinkdb as r
from bigchaindb import backend, utils
from bigchaindb.common import exceptions
from bigchaindb.common.transaction import Transaction
from bigchaindb.common.utils import serialize
from bigchaindb.backend.utils import module_dispatch_registrar
from bigchaindb.backend.rethinkdb.connection import RethinkDBConnection
@ -148,10 +149,10 @@ def get_votes_by_block_id_and_voter(connection, block_id, node_pubkey):
@register_query(RethinkDBConnection)
def write_block(connection, block):
def write_block(connection, block_dict):
return connection.run(
r.table('bigchain')
.insert(r.json(block.to_str()), durability=WRITE_DURABILITY))
.insert(r.json(serialize(block_dict)), durability=WRITE_DURABILITY))
@register_query(RethinkDBConnection)
@ -159,6 +160,20 @@ def get_block(connection, block_id):
return connection.run(r.table('bigchain').get(block_id))
@register_query(RethinkDBConnection)
def write_assets(connection, assets):
return connection.run(
r.table('assets')
.insert(assets, durability=WRITE_DURABILITY))
@register_query(RethinkDBConnection)
def get_assets(connection, asset_ids):
return connection.run(
r.table('assets', read_mode=READ_MODE)
.get_all(*asset_ids))
@register_query(RethinkDBConnection)
def count_blocks(connection):
return connection.run(
@ -189,7 +204,7 @@ def get_genesis_block(connection):
@register_query(RethinkDBConnection)
def get_last_voted_block(connection, node_pubkey):
def get_last_voted_block_id(connection, node_pubkey):
try:
# get the latest value for the vote timestamp (over all votes)
max_timestamp = connection.run(
@ -204,7 +219,7 @@ def get_last_voted_block(connection, node_pubkey):
except r.ReqlNonExistenceError:
# return last vote if last vote exists else return Genesis block
return get_genesis_block(connection)
return get_genesis_block(connection)['id']
# Now the fun starts. Since the resolution of timestamp is a second,
# we might have more than one vote per timestamp. If this is the case
@ -236,9 +251,7 @@ def get_last_voted_block(connection, node_pubkey):
except KeyError:
break
return connection.run(
r.table('bigchain', read_mode=READ_MODE)
.get(last_block_id))
return last_block_id
@register_query(RethinkDBConnection)

View File

@ -23,7 +23,7 @@ def create_database(connection, dbname):
@register_schema(RethinkDBConnection)
def create_tables(connection, dbname):
for table_name in ['bigchain', 'backlog', 'votes']:
for table_name in ['bigchain', 'backlog', 'votes', 'assets']:
logger.info('Create `%s` table.', table_name)
connection.run(r.db(dbname).table_create(table_name))

View File

@ -182,15 +182,23 @@ class Bigchain(object):
include_status (bool): also return the status of the block
the return value is then a tuple: (block, status)
"""
block = backend.query.get_block(self.connection, block_id)
status = None
# get block from database
block_dict = backend.query.get_block(self.connection, block_id)
# get the asset ids from the block
if block_dict:
asset_ids = Block.get_asset_ids(block_dict)
# get the assets from the database
assets = self.get_assets(asset_ids)
# add the assets to the block transactions
block_dict = Block.couple_assets(block_dict, assets)
status = None
if include_status:
if block:
status = self.block_election_status(block)
return block, status
if block_dict:
status = self.block_election_status(block_dict)
return block_dict, status
else:
return block
return block_dict
def get_transaction(self, txid, include_status=False):
"""Get the transaction with the specified `txid` (and optionally its status)
@ -250,7 +258,13 @@ class Bigchain(object):
tx_status = self.TX_IN_BACKLOG
if response:
response = Transaction.from_dict(response)
if tx_status == self.TX_IN_BACKLOG:
response = Transaction.from_dict(response)
else:
# If we are reading from the bigchain collection the asset is
# not in the transaction so we need to fetch the asset and
# reconstruct the transaction.
response = Transaction.from_db(self, response)
if include_status:
return response, tx_status
@ -470,7 +484,14 @@ class Bigchain(object):
block (Block): block to write to bigchain.
"""
return backend.query.write_block(self.connection, block)
# Decouple assets from block
assets, block_dict = block.decouple_assets()
# write the assets
if assets:
self.write_assets(assets)
# write the block
return backend.query.write_block(self.connection, block_dict)
def prepare_genesis_block(self):
"""Prepare a genesis block."""
@ -549,7 +570,9 @@ class Bigchain(object):
def get_last_voted_block(self):
"""Returns the last block that this node voted on."""
return Block.from_dict(backend.query.get_last_voted_block(self.connection, self.me))
last_block_id = backend.query.get_last_voted_block_id(self.connection,
self.me)
return Block.from_dict(self.get_block(last_block_id))
def get_unvoted_blocks(self):
"""Return all the blocks that have not been voted on by this node.
@ -573,3 +596,26 @@ class Bigchain(object):
"""Tally the votes on a block, and return the status:
valid, invalid, or undecided."""
return self.block_election(block)['status']
def get_assets(self, asset_ids):
"""
Return a list of assets that match the asset_ids
Args:
asset_ids (:obj:`list` of :obj:`str`): A list of asset_ids to
retrieve from the database.
Returns:
list: The list of assets returned from the database.
"""
return backend.query.get_assets(self.connection, asset_ids)
def write_assets(self, assets):
"""
Writes a list of assets into the database.
Args:
assets (:obj:`list` of :obj:`dict`): A list of assets to write to
the database.
"""
return backend.query.write_assets(self.connection, assets)

View File

@ -1,3 +1,5 @@
from copy import deepcopy
from bigchaindb.common.crypto import hash_data, PublicKey, PrivateKey
from bigchaindb.common.exceptions import (InvalidHash, InvalidSignature,
DoubleSpend, InputDoesNotExist,
@ -84,6 +86,31 @@ class Transaction(Transaction):
validate_transaction_schema(tx_body)
return super().from_dict(tx_body)
@classmethod
def from_db(cls, bigchain, tx_dict):
"""
Helper method that reconstructs a transaction dict that was returned
from the database. It checks what asset_id to retrieve, retrieves the
asset from the asset table and reconstructs the transaction.
Args:
bigchain (:class:`~bigchaindb.Bigchain`): An instance of Bigchain
used to perform database queries.
tx_dict (:obj:`dict`): The transaction dict as returned from the
database.
Returns:
:class:`~Transaction`
"""
if tx_dict['operation'] in [Transaction.CREATE, Transaction.GENESIS]:
# TODO: Maybe replace this call to a call to get_asset_by_id
asset = list(bigchain.get_assets([tx_dict['id']]))[0]
del asset['id']
tx_dict.update({'asset': asset})
return cls.from_dict(tx_dict)
class Block(object):
"""Bundle a list of Transactions in a Block. Nodes vote on its validity.
@ -300,5 +327,95 @@ class Block(object):
'signature': self.signature,
}
@classmethod
def from_db(cls, bigchain, block_dict):
"""
Helper method that reconstructs a block_dict that was returned from
the database. It checks what asset_ids to retrieve, retrieves the
assets from the assets table and reconstructs the block.
Args:
bigchain (:class:`~bigchaindb.Bigchain`): An instance of Bigchain
used to perform database queries.
block_dict(:obj:`dict`): The block dict as returned from the
database.
Returns:
:class:`~Block`
"""
asset_ids = cls.get_asset_ids(block_dict)
assets = bigchain.get_assets(asset_ids)
block_dict = cls.couple_assets(block_dict, assets)
return cls.from_dict(block_dict)
def decouple_assets(self):
"""
Extracts the assets from the ``CREATE`` transactions in the block.
Returns:
tuple: (assets, block) with the assets being a list of dicts and
the block being the dict of the block with no assets in the CREATE
transactions.
"""
block_dict = deepcopy(self.to_dict())
assets = []
for transaction in block_dict['block']['transactions']:
if transaction['operation'] in [Transaction.CREATE,
Transaction.GENESIS]:
asset = transaction.pop('asset')
asset.update({'id': transaction['id']})
assets.append(asset)
return (assets, block_dict)
@staticmethod
def couple_assets(block_dict, assets):
"""
Given a block_dict with no assets (as returned from a database call)
and a list of assets, reconstruct the original block by putting the
assets back into the ``CREATE`` transactions in the block.
Args:
block_dict (:obj:`dict`): The block dict as returned from a
database call.
assets (:obj:`list` of :obj:`dict`): A list of assets returned from
a database call.
Returns:
dict: The dict of the reconstructed block.
"""
# create a dict with {'<txid>': asset}
assets = {asset.pop('id'): asset for asset in assets}
# add the assets to the block transactions
for transaction in block_dict['block']['transactions']:
if transaction['operation'] in [Transaction.CREATE,
Transaction.GENESIS]:
transaction.update({'asset': assets.get(transaction['id'])})
return block_dict
@staticmethod
def get_asset_ids(block_dict):
"""
Given a block_dict return all the asset_ids for that block (the txid
of CREATE transactions). Useful to know which assets to retrieve
from the database to reconstruct the block.
Args:
block_dict (:obj:`dict`): The block dict as returned from a
database call.
Returns:
list: The list of asset_ids in the block.
"""
asset_ids = []
for transaction in block_dict['block']['transactions']:
if transaction['operation'] in [Transaction.CREATE,
Transaction.GENESIS]:
asset_ids.append(transaction['id'])
return asset_ids
def to_str(self):
return serialize(self.to_dict())

View File

@ -50,7 +50,7 @@ class Vote:
def validate_block(self, block):
if not self.bigchain.has_previous_vote(block['id']):
try:
block = Block.from_dict(block)
block = Block.from_db(self.bigchain, block)
except (exceptions.InvalidHash):
# XXX: if a block is invalid we should skip the `validate_tx`
# step, but since we are in a pipeline we cannot just jump to

View File

@ -1,4 +1,7 @@
from copy import deepcopy
import pytest
import pymongo
pytestmark = pytest.mark.bdb
@ -269,7 +272,7 @@ def test_write_block(signed_create_tx):
# create and write block
block = Block(transactions=[signed_create_tx])
query.write_block(conn, block)
query.write_block(conn, block.to_dict())
block_db = conn.db.bigchain.find_one({'id': block.id}, {'_id': False})
@ -347,17 +350,18 @@ def test_get_genesis_block(genesis_block):
from bigchaindb.backend import connect, query
conn = connect()
assert query.get_genesis_block(conn) == genesis_block.to_dict()
assets, genesis_block_dict = genesis_block.decouple_assets()
assert query.get_genesis_block(conn) == genesis_block_dict
def test_get_last_voted_block(genesis_block, signed_create_tx, b):
def test_get_last_voted_block_id(genesis_block, signed_create_tx, b):
from bigchaindb.backend import connect, query
from bigchaindb.models import Block
from bigchaindb.common.exceptions import CyclicBlockchainError
conn = connect()
# check that the last voted block is the genesis block
assert query.get_last_voted_block(conn, b.me) == genesis_block.to_dict()
assert query.get_last_voted_block_id(conn, b.me) == genesis_block.id
# create and insert a new vote and block
block = Block(transactions=[signed_create_tx])
@ -365,7 +369,7 @@ def test_get_last_voted_block(genesis_block, signed_create_tx, b):
vote = b.vote(block.id, genesis_block.id, True)
conn.db.votes.insert_one(vote)
assert query.get_last_voted_block(conn, b.me) == block.to_dict()
assert query.get_last_voted_block_id(conn, b.me) == block.id
# force a bad chain
vote.pop('_id')
@ -374,7 +378,7 @@ def test_get_last_voted_block(genesis_block, signed_create_tx, b):
conn.db.votes.insert_one(vote)
with pytest.raises(CyclicBlockchainError):
query.get_last_voted_block(conn, b.me)
query.get_last_voted_block_id(conn, b.me)
def test_get_unvoted_blocks(signed_create_tx):
@ -466,3 +470,46 @@ def test_get_votes_for_blocks_by_voter():
conn.db.votes.insert_one(vote.copy())
res = query.get_votes_for_blocks_by_voter(conn, ['block1', 'block2'], 'a')
assert list(res) == [votes[0], votes[2]]
def test_write_assets():
from bigchaindb.backend import connect, query
conn = connect()
assets = [
{'id': 1, 'data': '1'},
{'id': 2, 'data': '2'},
{'id': 3, 'data': '3'},
# Duplicated id. Should not be written to the database
{'id': 1, 'data': '1'},
]
# write the assets
query.write_assets(conn, deepcopy(assets))
# check that 3 assets were written to the database
cursor = conn.db.assets.find({}, projection={'_id': False})\
.sort('id', pymongo.ASCENDING)
assert cursor.count() == 3
assert list(cursor) == assets[:-1]
def test_get_assets():
from bigchaindb.backend import connect, query
conn = connect()
assets = [
{'id': 1, 'data': '1'},
{'id': 2, 'data': '2'},
{'id': 3, 'data': '3'},
]
# write the assets
conn.db.assets.insert_many(deepcopy(assets), ordered=False)
# read only 2 assets
cursor = query.get_assets(conn, [1, 3])
assert cursor.count() == 2
assert list(cursor.sort('id', pymongo.ASCENDING)) == assets[::2]

View File

@ -18,7 +18,8 @@ def test_init_creates_db_tables_and_indexes():
init_database()
collection_names = conn.conn[dbname].collection_names()
assert sorted(collection_names) == ['backlog', 'bigchain', 'votes']
assert sorted(collection_names) == ['assets', 'backlog', 'bigchain',
'votes']
indexes = conn.conn[dbname]['bigchain'].index_information().keys()
assert sorted(indexes) == ['_id_', 'asset_id', 'block_timestamp', 'inputs',
@ -31,6 +32,9 @@ def test_init_creates_db_tables_and_indexes():
indexes = conn.conn[dbname]['votes'].index_information().keys()
assert sorted(indexes) == ['_id_', 'block_and_voter']
indexes = conn.conn[dbname]['assets'].index_information().keys()
assert sorted(indexes) == ['_id_', 'asset_id']
def test_init_database_fails_if_db_exists():
import bigchaindb
@ -62,7 +66,8 @@ def test_create_tables():
schema.create_tables(conn, dbname)
collection_names = conn.conn[dbname].collection_names()
assert sorted(collection_names) == ['backlog', 'bigchain', 'votes']
assert sorted(collection_names) == ['assets', 'backlog', 'bigchain',
'votes']
def test_create_secondary_indexes():

View File

@ -63,7 +63,8 @@ def test_create_tables():
assert conn.run(r.db(dbname).table_list().contains('bigchain')) is True
assert conn.run(r.db(dbname).table_list().contains('backlog')) is True
assert conn.run(r.db(dbname).table_list().contains('votes')) is True
assert len(conn.run(r.db(dbname).table_list())) == 3
assert conn.run(r.db(dbname).table_list().contains('assets')) is True
assert len(conn.run(r.db(dbname).table_list())) == 4
@pytest.mark.bdb

View File

@ -30,7 +30,7 @@ def test_schema(schema_func_name, args_qty):
('write_block', 1),
('get_block', 1),
('write_vote', 1),
('get_last_voted_block', 1),
('get_last_voted_block_id', 1),
('get_unvoted_blocks', 1),
('get_spent', 2),
('get_votes_by_block_id_and_voter', 2),
@ -38,6 +38,8 @@ def test_schema(schema_func_name, args_qty):
('get_transaction_from_block', 2),
('get_votes_for_blocks_by_voter', 2),
('get_spending_transactions', 1),
('write_assets', 1),
('get_assets', 1),
))
def test_query(query_func_name, args_qty):
from bigchaindb.backend import query

View File

@ -383,7 +383,7 @@ class TestBigchainApi(object):
from bigchaindb.backend import query
genesis = query.get_genesis_block(b.connection)
genesis = Block.from_dict(genesis)
genesis = Block.from_db(b, genesis)
gb = b.get_last_voted_block()
assert gb == genesis
assert b.validate_block(gb) == gb

View File

@ -20,6 +20,15 @@ def dummy_block(b):
return block
def decouple_assets(b, block):
# the block coming from the database does not contain the assets
# so we need to pass the block without the assets and store the assets
# so that the voting pipeline can reconstruct it
assets, block_dict = block.decouple_assets()
b.write_assets(assets)
return block_dict
DUMMY_SHA3 = '0123456789abcdef' * 4
@ -79,9 +88,10 @@ def test_vote_validate_block(b):
tx = dummy_tx(b)
block = b.create_block([tx])
block_dict = decouple_assets(b, block)
vote_obj = vote.Vote()
validation = vote_obj.validate_block(block.to_dict())
validation = vote_obj.validate_block(block_dict)
assert validation[0] == block.id
for tx1, tx2 in zip(validation[1], block.transactions):
assert tx1 == tx2
@ -220,8 +230,9 @@ def test_valid_block_voting_multiprocessing(b, genesis_block, monkeypatch):
vote_pipeline.setup(indata=inpipe, outdata=outpipe)
block = dummy_block(b)
block_dict = decouple_assets(b, block)
inpipe.put(block.to_dict())
inpipe.put(block_dict)
vote_pipeline.start()
vote_out = outpipe.get()
vote_pipeline.terminate()
@ -257,6 +268,7 @@ def test_valid_block_voting_with_create_transaction(b,
monkeypatch.setattr('time.time', lambda: 1111111111)
block = b.create_block([tx])
block_dict = decouple_assets(b, block)
inpipe = Pipe()
outpipe = Pipe()
@ -264,7 +276,7 @@ def test_valid_block_voting_with_create_transaction(b,
vote_pipeline = vote.create_pipeline()
vote_pipeline.setup(indata=inpipe, outdata=outpipe)
inpipe.put(block.to_dict())
inpipe.put(block_dict)
vote_pipeline.start()
vote_out = outpipe.get()
vote_pipeline.terminate()

View File

@ -1,3 +1,4 @@
import pytest
from pytest import raises
@ -153,3 +154,137 @@ class TestBlockModel(object):
block = b.create_block([tx, tx])
with raises(DuplicateTransaction):
block._validate_block(b)
def test_decouple_assets(self, b):
from bigchaindb.models import Block, Transaction
assets = [
{'msg': '1'},
{'msg': '2'},
{'msg': '3'},
]
txs = []
# create 3 assets
for asset in assets:
tx = Transaction.create([b.me], [([b.me], 1)], asset=asset)
txs.append(tx)
# create a `TRANSFER` transaction.
# the asset in `TRANSFER` transactions is not extracted
tx = Transaction.transfer(txs[0].to_inputs(), [([b.me], 1)],
asset_id=txs[0].id)
txs.append(tx)
# create the block
block = Block(txs)
# decouple assets
assets_from_block, block_dict = block.decouple_assets()
assert len(assets_from_block) == 3
for i in range(3):
assert assets_from_block[i]['data'] == assets[i]
assert assets_from_block[i]['id'] == txs[i].id
# check the `TRANSFER` transaction was not changed
assert block.transactions[3].to_dict() == \
block_dict['block']['transactions'][3]
def test_couple_assets(self, b):
from bigchaindb.models import Block, Transaction
assets = [
{'msg': '1'},
{'msg': '2'},
{'msg': '3'},
]
txs = []
# create 3 assets
for asset in assets:
tx = Transaction.create([b.me], [([b.me], 1)], asset=asset)
txs.append(tx)
# create a `TRANSFER` transaction.
# the asset in `TRANSFER` transactions is not extracted
tx = Transaction.transfer(txs[0].to_inputs(), [([b.me], 1)],
asset_id=txs[0].id)
txs.append(tx)
# create the block
block = Block(txs)
# decouple assets
assets_from_block, block_dict = block.decouple_assets()
# reconstruct the block
block_dict_reconstructed = Block.couple_assets(block_dict,
assets_from_block)
# check that the reconstructed block is the same as the original block
assert block == Block.from_dict(block_dict_reconstructed)
def test_get_asset_ids(self, b):
from bigchaindb.models import Block, Transaction
assets = [
{'msg': '1'},
{'msg': '2'},
{'msg': '3'},
]
txs = []
# create 3 assets
for asset in assets:
tx = Transaction.create([b.me], [([b.me], 1)], asset=asset)
txs.append(tx)
# create a `TRANSFER` transaction.
# the asset in `TRANSFER` transactions is not extracted
tx = Transaction.transfer(txs[0].to_inputs(), [([b.me], 1)],
asset_id=txs[0].id)
txs.append(tx)
# create the block
block = Block(txs)
# decouple assets
assets_from_block, block_dict = block.decouple_assets()
# get the asset_ids and check that they are the same as the `CREATE`
# transactions
asset_ids = Block.get_asset_ids(block_dict)
assert asset_ids == [tx.id for tx in txs[:-1]]
@pytest.mark.bdb
def test_from_db(self, b):
from bigchaindb.models import Block, Transaction
assets = [
{'msg': '1'},
{'msg': '2'},
{'msg': '3'},
]
txs = []
# create 3 assets
for asset in assets:
tx = Transaction.create([b.me], [([b.me], 1)], asset=asset)
txs.append(tx)
# create a `TRANSFER` transaction.
# the asset in `TRANSFER` transactions is not extracted
tx = Transaction.transfer(txs[0].to_inputs(), [([b.me], 1)],
asset_id=txs[0].id)
txs.append(tx)
# create the block
block = Block(txs)
# decouple assets
assets_from_block, block_dict = block.decouple_assets()
# write the assets and block separately
b.write_assets(assets_from_block)
b.write_block(block)
# check the reconstructed block is the same as the original block
block_from_db = Block.from_db(b, block_dict)
assert block == block_from_db

View File

@ -32,6 +32,7 @@ def flush_rethink_db(connection, dbname):
connection.run(r.db(dbname).table('bigchain').delete())
connection.run(r.db(dbname).table('backlog').delete())
connection.run(r.db(dbname).table('votes').delete())
connection.run(r.db(dbname).table('assets').delete())
except r.ReqlOpFailedError:
pass
@ -41,6 +42,7 @@ def flush_mongo_db(connection, dbname):
connection.conn[dbname].bigchain.delete_many({})
connection.conn[dbname].backlog.delete_many({})
connection.conn[dbname].votes.delete_many({})
connection.conn[dbname].assets.delete_many({})
@singledispatch