diff --git a/circuits/BatchTreeUpdate.circom b/circuits/BatchTreeUpdate.circom index 59b98e2..ceb035b 100644 --- a/circuits/BatchTreeUpdate.circom +++ b/circuits/BatchTreeUpdate.circom @@ -3,6 +3,7 @@ include "../node_modules/circomlib/circuits/bitify.circom"; include "./MerkleTreeUpdater.circom"; include "./TreeUpdateArgsHasher.circom"; +// Computes hashes of the next tree layer template TreeLayer(height) { var nItems = 1 << height; signal input ins[nItems * 2]; @@ -19,6 +20,7 @@ template TreeLayer(height) { // Inserts a leaf batch into a tree // Checks that tree previously contained zero leaves in the same position +// Hashes leaves with Poseidon hash template BatchTreeUpdate(levels, batchLevels, zeroBatchLeaf) { var height = levels - batchLevels; var nLeaves = 1 << batchLevels; diff --git a/circuits/TreeUpdateArgsHasher.circom b/circuits/TreeUpdateArgsHasher.circom index 13d11fb..4fb7eb0 100644 --- a/circuits/TreeUpdateArgsHasher.circom +++ b/circuits/TreeUpdateArgsHasher.circom @@ -1,13 +1,15 @@ include "../node_modules/circomlib/circuits/bitify.circom"; include "../node_modules/circomlib/circuits/sha256/sha256.circom"; +// Computes a SHA256 hash of all inputs packed into a byte array +// Field elements are padded to 256 bits with zeroes template TreeUpdateArgsHasher(nLeaves) { - signal private input oldRoot; - signal private input newRoot; - signal private input pathIndices; - signal private input instances[nLeaves]; - signal private input hashes[nLeaves]; - signal private input blocks[nLeaves]; + signal input oldRoot; + signal input newRoot; + signal input pathIndices; + signal input instances[nLeaves]; + signal input hashes[nLeaves]; + signal input blocks[nLeaves]; signal output out; var header = 256 + 256 + 32; diff --git a/contracts/TornadoTrees.sol b/contracts/TornadoTrees.sol index 5452b68..06671ca 100644 --- a/contracts/TornadoTrees.sol +++ b/contracts/TornadoTrees.sol @@ -100,6 +100,7 @@ contract TornadoTrees is Initializable { withdrawalsLength = 
withdrawalsV1Length; } + /// @dev Queue a new deposit data to be inserted into a merkle tree function registerDeposit(address _instance, bytes32 _commitment) public onlyTornadoProxy { uint256 _depositsLength = depositsLength; deposits[_depositsLength] = keccak256(abi.encode(_instance, _commitment, blockNumber())); @@ -107,6 +108,7 @@ contract TornadoTrees is Initializable { depositsLength = _depositsLength + 1; } + /// @dev Queue a new withdrawal data to be inserted into a merkle tree function registerWithdrawal(address _instance, bytes32 _nullifierHash) public onlyTornadoProxy { uint256 _withdrawalsLength = withdrawalsLength; withdrawals[_withdrawalsLength] = keccak256(abi.encode(_instance, _nullifierHash, blockNumber())); @@ -114,6 +116,13 @@ contract TornadoTrees is Initializable { withdrawalsLength = _withdrawalsLength + 1; } + /// @dev Insert a full batch of queued deposits into a merkle tree + /// @param _proof A snark proof that elements were inserted correctly + /// @param _argsHash A hash of snark inputs + /// @param _currentRoot Current merkle tree root + /// @param _newRoot Updated merkle tree root + /// @param _pathIndices Merkle path to inserted batch + /// @param _events A batch of inserted events (leaves) function updateDepositTree( bytes calldata _proof, bytes32 _argsHash, @@ -158,6 +167,13 @@ contract TornadoTrees is Initializable { lastProcessedDepositLeaf = offset + CHUNK_SIZE; } + /// @dev Insert a full batch of queued withdrawals into a merkle tree + /// @param _proof A snark proof that elements were inserted correctly + /// @param _argsHash A hash of snark inputs + /// @param _currentRoot Current merkle tree root + /// @param _newRoot Updated merkle tree root + /// @param _pathIndices Merkle path to inserted batch + /// @param _events A batch of inserted events (leaves) function updateWithdrawalTree( bytes calldata _proof, bytes32 _argsHash,