mirror of https://github.com/tornadocash/tornado-nova
synced 2024-02-02 14:53:56 +01:00

onchain tree

parent d374e28f69
commit 70f28dd845
@@ -17,7 +17,6 @@ nullifier = hash(commitment, privKey, merklePath)
 // Universal JoinSplit transaction with nIns inputs and 2 outputs
 template Transaction(levels, nIns, nOuts, zeroLeaf) {
     signal input root;
-    signal input newRoot;
     // extAmount = external amount used for deposits and withdrawals
     // correct extAmount range is enforced on the smart contract
     // publicAmount = extAmount - fee
@@ -37,7 +36,7 @@ template Transaction(levels, nIns, nOuts, zeroLeaf) {
     signal private input outAmount[nOuts];
     signal private input outBlinding[nOuts];
     signal private input outPubkey[nOuts];
-    signal input outPathIndices;
+    signal private input outPathIndices;
     signal private input outPathElements[levels - 1];

     component inKeypair[nIns];
@@ -118,17 +117,6 @@ template Transaction(levels, nIns, nOuts, zeroLeaf) {
     // verify amount invariant
     sumIns + publicAmount === sumOuts;

-    // Check merkle tree update with inserted transaction outputs
-    component treeUpdater = TreeUpdater(levels, 1 /* log2(nOuts) */, zeroLeaf);
-    treeUpdater.oldRoot <== root;
-    treeUpdater.newRoot <== newRoot;
-    for (var i = 0; i < nOuts; i++) {
-        treeUpdater.leaves[i] <== outputCommitment[i];
-    }
-    treeUpdater.pathIndices <== outPathIndices;
-    for (var i = 0; i < levels - 1; i++) {
-        treeUpdater.pathElements[i] <== outPathElements[i];
-    }
-
+    // optional safety constraint to make sure extDataHash cannot be changed
    signal extDataSquare <== extDataHash * extDataHash;
 }
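With newRoot and the TreeUpdater block removed, the circuit no longer proves the tree update; the new MerkleTreeWithHistory contract below maintains the tree on-chain instead. The publicAmount input the circuit keeps is computed off-chain with wrap-around field arithmetic, matching the getProof change later in this diff; a minimal sketch, assuming ethers' BigNumber:

    // publicAmount = extAmount - fee (mod FIELD_SIZE); a negative extAmount
    // (a withdrawal) wraps around to a large field element.
    const { BigNumber } = require('ethers')
    const FIELD_SIZE = BigNumber.from(
      '21888242871839275222246405745257275088548364400416034343698204186575808495617',
    )
    function calculatePublicAmount(extAmount, fee) {
      return BigNumber.from(extAmount).sub(fee).add(FIELD_SIZE).mod(FIELD_SIZE)
    }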
contracts/MerkleTreeWithHistory.sol (new file, 159 lines)
@@ -0,0 +1,159 @@
+// https://tornado.cash
+/*
+ * d888888P dP a88888b. dP
+ * 88 88 d8' `88 88
+ * 88 .d8888b. 88d888b. 88d888b. .d8888b. .d888b88 .d8888b. 88 .d8888b. .d8888b. 88d888b.
+ * 88 88' `88 88' `88 88' `88 88' `88 88' `88 88' `88 88 88' `88 Y8ooooo. 88' `88
+ * 88 88. .88 88 88 88 88. .88 88. .88 88. .88 dP Y8. .88 88. .88 88 88 88
+ * dP `88888P' dP dP dP `88888P8 `88888P8 `88888P' 88 Y88888P' `88888P8 `88888P' dP dP
+ * ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
+ */
+
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.7.0;
+
+import "@openzeppelin/contracts-upgradeable/proxy/Initializable.sol";
+
+interface IHasher {
+  function poseidon(bytes32[2] calldata inputs) external pure returns (bytes32);
+}
+
+contract MerkleTreeWithHistory is Initializable {
+  uint256 public constant FIELD_SIZE = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
+  uint256 public constant ZERO_VALUE = 21663839004416932945382355908790599225266501822907911457504978515578255421292; // = keccak256("tornado") % FIELD_SIZE
+
+  IHasher public immutable hasher;
+  uint32 public immutable levels;
+
+  // the following variables are made public for easier testing and debugging and
+  // are not supposed to be accessed in regular code
+
+  // filledSubtrees and roots could be bytes32[size], but using mappings makes it cheaper because
+  // it removes index range check on every interaction
+  mapping(uint256 => bytes32) public filledSubtrees;
+  mapping(uint256 => bytes32) public roots;
+  uint32 public constant ROOT_HISTORY_SIZE = 30;
+  uint32 public currentRootIndex = 0; // todo remove
+  uint32 public nextIndex = 0;
+
+  constructor(uint32 _levels, address _hasher) {
+    require(_levels > 0, "_levels should be greater than zero");
+    require(_levels < 31, "_levels should be less than 31");
+    levels = _levels;
+    hasher = IHasher(_hasher);
+  }
+
+  function initialize() external initializer {
+    for (uint32 i = 0; i < levels; i++) {
+      filledSubtrees[i] = zeros(i);
+    }
+
+    roots[0] = zeros(levels);
+  }
+
+  /**
+    @dev Hash 2 tree leaves, returns Poseidon(_left, _right)
+  */
+  function hashLeftRight(bytes32 _left, bytes32 _right) public view returns (bytes32) {
+    require(uint256(_left) < FIELD_SIZE, "_left should be inside the field");
+    require(uint256(_right) < FIELD_SIZE, "_right should be inside the field");
+    bytes32[2] memory input;
+    input[0] = _left;
+    input[1] = _right;
+    return hasher.poseidon(input);
+  }
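hashLeftRight delegates to the externally generated Poseidon hasher, so the on-chain tree can be mirrored off-chain with the same hash. A minimal sketch of the equivalence the tests below assert, assuming src/utils exports poseidonHash2 and toFixedHex as in this commit's test files (merkleTreeWithHistory and expect come from the test fixture):

    // Inside an async test: off-chain and on-chain Poseidon agree on a pair.
    const { poseidonHash2, toFixedHex } = require('../src/utils')

    const offChain = poseidonHash2(123, 456)
    const onChain = await merkleTreeWithHistory.hashLeftRight(toFixedHex(123), toFixedHex(456))
    expect(onChain).to.equal(offChain)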
+  // Modified to insert pairs of leaves for better efficiency
+  function _insert(bytes32 _leaf1, bytes32 _leaf2) internal returns (uint32 index) {
+    uint32 _nextIndex = nextIndex;
+    require(_nextIndex != uint32(2)**levels, "Merkle tree is full. No more leaves can be added");
+    uint32 currentIndex = _nextIndex / 2;
+    bytes32 currentLevelHash = hashLeftRight(_leaf1, _leaf2);
+    bytes32 left;
+    bytes32 right;
+
+    for (uint32 i = 1; i < levels; i++) {
+      if (currentIndex % 2 == 0) {
+        left = currentLevelHash;
+        right = zeros(i);
+        filledSubtrees[i] = currentLevelHash;
+      } else {
+        left = filledSubtrees[i];
+        right = currentLevelHash;
+      }
+      currentLevelHash = hashLeftRight(left, right);
+      currentIndex /= 2;
+    }
+
+    uint32 newRootIndex = (currentRootIndex + 1) % ROOT_HISTORY_SIZE;
+    currentRootIndex = newRootIndex;
+    roots[newRootIndex] = currentLevelHash;
+    nextIndex = _nextIndex + 2;
+    return _nextIndex;
+  }
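Note that the loop starts at level 1: the two leaves are hashed into their level-1 parent up front, which is why only levels - 1 path elements exist per insertion and nextIndex advances by 2. A sketch of the off-chain mirror the tests in this commit rely on, assuming fixed-merkle-tree and poseidonHash2 as used there:

    // Inserting a pair off-chain reproduces the contract's new root.
    const MerkleTree = require('fixed-merkle-tree')
    const { poseidonHash2 } = require('../src/utils')

    const tree = new MerkleTree(5, [], { hashFunction: poseidonHash2 })
    tree.bulkInsert([123, 456]) // mirrors _insert(leaf1, leaf2) on-chain
    // tree.root() should equal merkleTreeWithHistory.getLastRoot()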
+  /**
+    @dev Whether the root is present in the root history
+  */
+  function isKnownRoot(bytes32 _root) public view returns (bool) {
+    if (_root == 0) {
+      return false;
+    }
+    uint32 _currentRootIndex = currentRootIndex;
+    uint32 i = _currentRootIndex;
+    do {
+      if (_root == roots[i]) {
+        return true;
+      }
+      if (i == 0) {
+        i = ROOT_HISTORY_SIZE;
+      }
+      i--;
+    } while (i != _currentRootIndex);
+    return false;
+  }
+
+  /**
+    @dev Returns the last root
+  */
+  function getLastRoot() public view returns (bytes32) {
+    return roots[currentRootIndex];
+  }
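isKnownRoot scans the 30-slot ring buffer backwards from the newest entry, wrapping at zero, so a proof built against any of the last ROOT_HISTORY_SIZE roots stays spendable while other users keep inserting. A minimal JS model of the same lookup:

    // Ring-buffer lookup mirroring isKnownRoot's do/while with wrap-around.
    const ROOT_HISTORY_SIZE = 30
    function isKnownRoot(roots, currentRootIndex, root) {
      if (root === 0n) return false
      let i = currentRootIndex
      do {
        if (roots[i] === root) return true
        if (i === 0) i = ROOT_HISTORY_SIZE
        i--
      } while (i !== currentRootIndex)
      return false
    }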
+  /// @dev provides Zero (Empty) elements for a Poseidon MerkleTree. Up to 32 levels
+  function zeros(uint256 i) public pure returns (bytes32) {
+    if (i == 0) return bytes32(0x2fe54c60d3acabf3343a35b6eba15db4821b340f76e741e2249685ed4899af6c);
+    else if (i == 1) return bytes32(0x1a332ca2cd2436bdc6796e6e4244ebf6f7e359868b7252e55342f766e4088082);
+    else if (i == 2) return bytes32(0x2fb19ac27499bdf9d7d3b387eff42b6d12bffbc6206e81d0ef0b0d6b24520ebd);
+    else if (i == 3) return bytes32(0x18d0d6e282d4eacbf18efc619a986db763b75095ed122fac7d4a49418daa42e1);
+    else if (i == 4) return bytes32(0x054dec40f76a0f5aaeff1a85a4a3721b92b4ad244362d30b0ef8ed7033de11d3);
+    else if (i == 5) return bytes32(0x1d24c91f8d40f1c2591edec19d392905cf5eb01eada48d71836177ef11aea5b2);
+    else if (i == 6) return bytes32(0x0fb63621cfc047eba2159faecfa55b120d7c81c0722633ef94e20e27675e378f);
+    else if (i == 7) return bytes32(0x277b08f214fe8c5504a79614cdec5abd7b6adc9133fe926398684c82fd798b44);
+    else if (i == 8) return bytes32(0x2633613437c1fd97f7c798e2ea30d52cfddee56d74f856a541320ae86ddaf2de);
+    else if (i == 9) return bytes32(0x00768963fa4b993fbfece3619bfaa3ca4afd7e3864f11b09a0849dbf4ad25807);
+    else if (i == 10) return bytes32(0x0e63ff9df484c1a21478bd27111763ef203177ec0a7ef3a3cd43ec909f587bb0);
+    else if (i == 11) return bytes32(0x0e6a4bfb0dd0ac8bf5517eaac48a95ba783dabe9f64494f9c892d3e8431eaab3);
+    else if (i == 12) return bytes32(0x0164a46b3ffff8baca00de7a130a63d105f1578076838502b99488505d5b3d35);
+    else if (i == 13) return bytes32(0x145a6f1521c02b250cc76eb35cd67c9b0b22473577de3778e4c51903836c8957);
+    else if (i == 14) return bytes32(0x29849fc5b55303a660bad33d986fd156d48516ec58a0f0a561a03b704a802254);
+    else if (i == 15) return bytes32(0x26639dd486b374e98ac6da34e8651b3fca58c51f1c2f857dd82045f27fc8dbe6);
+    else if (i == 16) return bytes32(0x2aa39214b887ee877e60afdb191390344c68177c30a0b8646649774174de5e33);
+    else if (i == 17) return bytes32(0x09b397d253e41a521d042ffe01f8c33ae37d4c7da21af68693aafb63d599d708);
+    else if (i == 18) return bytes32(0x02fbfd397ad901cea38553239aefec016fcb6a19899038503f04814cbb79a511);
+    else if (i == 19) return bytes32(0x266640a877ec97a91f6c95637f843eeac8718f53f311bac9cba7d958df646f9d);
+    else if (i == 20) return bytes32(0x29f9a0a07a22ab214d00aaa0190f54509e853f3119009baecb0035347606b0a9);
+    else if (i == 21) return bytes32(0x0a1fda67bffa0ab3a755f23fdcf922720820b6a96616a5ca34643cd0b935e3d6);
+    else if (i == 22) return bytes32(0x19507199eb76b5ec5abe538a01471d03efb6c6984739c77ec61ada2ba2afb389);
+    else if (i == 23) return bytes32(0x26bd93d26b751484942282e27acfb6d193537327a831df6927e19cdfc73c3e64);
+    else if (i == 24) return bytes32(0x2eb88a9c6b00a4bc6ea253268090fe1d255f6fe02d2eb745517723aae44d7386);
+    else if (i == 25) return bytes32(0x13e50d0bda78be97792df40273cbb16f0dc65c0697d81a82d07d0f6eee80a164);
+    else if (i == 26) return bytes32(0x2ea95776929000133246ff8d9fdcba179d0b262b9e910558309bac1c1ec03d7a);
+    else if (i == 27) return bytes32(0x1a640d6ef66e356c795396c0957b06a99891afe0c493f4d0bdfc0450764bae60);
+    else if (i == 28) return bytes32(0x2b17979f2c2048dd9e4ee5f482cced21435ea8cc54c32f80562e39a5016b0496);
+    else if (i == 29) return bytes32(0x29ba6a30de50542e261abfc7ee0c68911002d3acd4dd4c02ad59aa96805b20bb);
+    else if (i == 30) return bytes32(0x103fcf1c8a98ebe50285f6e669077a579308311fd44bb6895d5da7ba7fd3564e);
+    else if (i == 31) return bytes32(0x166bdd01780976e655f5278260c638dcf10fe7c136f37c9152cbcaabef901f4d);
+    else revert("Index out of bounds");
+  }
+}
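Each zeros(i) is the root of an empty subtree of height i: zeros(0) is ZERO_VALUE and every next level hashes the previous value with itself. A sketch of how the table can be regenerated, assuming circomlib's poseidon (the same permutation the generated Hasher contract exposes):

    // Regenerate the zeros(i) table off-chain.
    const { poseidon } = require('circomlib')

    let zero = 21663839004416932945382355908790599225266501822907911457504978515578255421292n // ZERO_VALUE
    for (let i = 0; i < 32; i++) {
      console.log(i, '0x' + zero.toString(16).padStart(64, '0')) // zeros(i)
      zero = poseidon([zero, zero]) // zeros(i + 1) = Poseidon(zeros(i), zeros(i))
    }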
contracts/Mocks/MerkleTreeWithHistoryMock.sol (new file, 12 lines)
@@ -0,0 +1,12 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.7.0;
+
+import "../MerkleTreeWithHistory.sol";
+
+contract MerkleTreeWithHistoryMock is MerkleTreeWithHistory {
+  constructor(uint32 _levels, address _hasher) MerkleTreeWithHistory(_levels, _hasher) {}
+
+  function insert(bytes32 _leaf1, bytes32 _leaf2) public returns (uint32 index) {
+    return _insert(_leaf1, _leaf2);
+  }
+}
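The mock exists only to expose the internal paired insert to tests; the fixture in test/tree.test.js (added below) drives it like this, using that file's deploy helper:

    // Deploy the mock against the generated Hasher and insert a pair of leaves.
    const merkleTreeWithHistory = await deploy('MerkleTreeWithHistoryMock', MERKLE_TREE_HEIGHT, hasher.address)
    await merkleTreeWithHistory.initialize()
    await merkleTreeWithHistory.insert(toFixedHex(123), toFixedHex(456))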
contracts/TornadoPool.sol
@@ -12,26 +12,23 @@
 pragma solidity ^0.7.0;
 pragma experimental ABIEncoderV2;
-import "@openzeppelin/contracts-upgradeable/proxy/Initializable.sol";
+import "./MerkleTreeWithHistory.sol";

 interface IVerifier {
-  function verifyProof(bytes memory _proof, uint256[9] memory _input) external view returns (bool);
+  function verifyProof(bytes memory _proof, uint256[7] memory _input) external view returns (bool);

-  function verifyProof(bytes memory _proof, uint256[23] memory _input) external view returns (bool);
+  function verifyProof(bytes memory _proof, uint256[21] memory _input) external view returns (bool);
 }

 interface ERC20 {
   function transfer(address to, uint256 value) external returns (bool);
 }

-contract TornadoPool is Initializable {
-  uint256 public constant FIELD_SIZE = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
+contract TornadoPool is MerkleTreeWithHistory {
   int256 public constant MAX_EXT_AMOUNT = 2**248;
   uint256 public constant MAX_FEE = 2**248;

   mapping(bytes32 => bool) public nullifierHashes;
-  bytes32 public currentRoot;
-  uint256 public currentCommitmentIndex;
   IVerifier public immutable verifier2;
   IVerifier public immutable verifier16;

@@ -47,10 +44,8 @@ contract TornadoPool is Initializable {
   struct Proof {
     bytes proof;
     bytes32 root;
-    bytes32 newRoot;
     bytes32[] inputNullifiers;
     bytes32[2] outputCommitments;
-    uint256 outPathIndices;
     uint256 publicAmount;
     bytes32 extDataHash;
   }
@@ -70,28 +65,25 @@ contract TornadoPool is Initializable {
     @param _verifier2 the address of SNARK verifier for 2 inputs
     @param _verifier16 the address of SNARK verifier for 16 inputs
   */
-  constructor(IVerifier _verifier2, IVerifier _verifier16) {
+  constructor(
+    IVerifier _verifier2,
+    IVerifier _verifier16,
+    uint32 _levels,
+    address _hasher
+  ) MerkleTreeWithHistory(_levels, _hasher) {
     verifier2 = _verifier2;
     verifier16 = _verifier16;
   }

-  function initialize(bytes32 _currentRoot) external initializer {
-    currentRoot = _currentRoot;
-  }
-
   function transaction(Proof calldata _args, ExtData calldata _extData) public payable {
-    require(currentRoot == _args.root, "Invalid merkle root");
+    require(isKnownRoot(_args.root), "Invalid merkle root");
     for (uint256 i = 0; i < _args.inputNullifiers.length; i++) {
       require(!isSpent(_args.inputNullifiers[i]), "Input is already spent");
     }
     require(uint256(_args.extDataHash) == uint256(keccak256(abi.encode(_extData))) % FIELD_SIZE, "Incorrect external data hash");
-    uint256 cachedCommitmentIndex = currentCommitmentIndex;
-    require(_args.outPathIndices == cachedCommitmentIndex >> 1, "Invalid merkle tree insert position");
     require(_args.publicAmount == calculatePublicAmount(_extData.extAmount, _extData.fee), "Invalid public amount");
     require(verifyProof(_args), "Invalid transaction proof");

-    currentRoot = _args.newRoot;
-    currentCommitmentIndex = cachedCommitmentIndex + 2;
     for (uint256 i = 0; i < _args.inputNullifiers.length; i++) {
       nullifierHashes[_args.inputNullifiers[i]] = true;
     }
@@ -110,8 +102,9 @@ contract TornadoPool is Initializable {
       _transfer(_extData.relayer, _extData.fee);
     }

-    emit NewCommitment(_args.outputCommitments[0], cachedCommitmentIndex, _extData.encryptedOutput1);
-    emit NewCommitment(_args.outputCommitments[1], cachedCommitmentIndex + 1, _extData.encryptedOutput2);
+    _insert(_args.outputCommitments[0], _args.outputCommitments[1]);
+    emit NewCommitment(_args.outputCommitments[0], nextIndex - 2, _extData.encryptedOutput1);
+    emit NewCommitment(_args.outputCommitments[1], nextIndex - 1, _extData.encryptedOutput2);
     for (uint256 i = 0; i < _args.inputNullifiers.length; i++) {
       emit NewNullifier(_args.inputNullifiers[i]);
     }
@@ -148,14 +141,12 @@ contract TornadoPool is Initializable {
         _args.proof,
         [
           uint256(_args.root),
-          uint256(_args.newRoot),
           _args.publicAmount,
           uint256(_args.extDataHash),
           uint256(_args.inputNullifiers[0]),
           uint256(_args.inputNullifiers[1]),
           uint256(_args.outputCommitments[0]),
-          uint256(_args.outputCommitments[1]),
-          _args.outPathIndices
+          uint256(_args.outputCommitments[1])
         ]
       );
     } else if (_args.inputNullifiers.length == 16) {
@@ -164,7 +155,6 @@ contract TornadoPool is Initializable {
         _args.proof,
         [
           uint256(_args.root),
-          uint256(_args.newRoot),
           _args.publicAmount,
           uint256(_args.extDataHash),
           uint256(_args.inputNullifiers[0]),
@@ -184,8 +174,7 @@ contract TornadoPool is Initializable {
           uint256(_args.inputNullifiers[14]),
           uint256(_args.inputNullifiers[15]),
           uint256(_args.outputCommitments[0]),
-          uint256(_args.outputCommitments[1]),
-          _args.outPathIndices
+          uint256(_args.outputCommitments[1])
         ]
       );
     } else {
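Dropping uint256(_args.newRoot) and _args.outPathIndices from both arrays is what shrinks the verifier signatures from uint256[9] to uint256[7] and from uint256[23] to uint256[21]. A sketch of the resulting 2-input public signal layout in plain JS, using the args names from this diff:

    // Public signals for verifier2 after this commit (7 entries).
    const publicSignals = [
      args.root,
      args.publicAmount,
      args.extDataHash,
      args.inputNullifiers[0],
      args.inputNullifiers[1],
      args.outputCommitments[0],
      args.outputCommitments[1],
    ]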
scripts/compileHasher.js (new file, 22 lines)
@@ -0,0 +1,22 @@
+// Generates Hasher artifact at compile-time using external compiler mechanism
+const path = require('path')
+const fs = require('fs')
+const genContract = require('circomlib/src/poseidon_gencontract.js')
+const outputPath = path.join(__dirname, '..', 'artifacts', 'contracts')
+const outputFile = path.join(outputPath, 'Hasher.json')
+
+if (!fs.existsSync(outputPath)) {
+  fs.mkdirSync(outputPath, { recursive: true })
+}
+
+const contract = {
+  _format: 'hh-sol-artifact-1',
+  sourceName: 'contracts/Hasher.sol',
+  linkReferences: {},
+  deployedLinkReferences: {},
+  contractName: 'Hasher',
+  abi: genContract.generateABI(2),
+  bytecode: genContract.createCode(2),
+}
+
+fs.writeFileSync(outputFile, JSON.stringify(contract, null, 2))
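Writing the artifact into artifacts/contracts lets hardhat treat the Poseidon bytecode like any compiled contract; the test fixtures in this commit consume it as follows (a sketch mirroring those fixtures):

    // Generate the Hasher artifact, then deploy it by name through hardhat.
    const { ethers } = require('hardhat')

    async function deployHasher() {
      require('../scripts/compileHasher') // writes artifacts/contracts/Hasher.json
      const Hasher = await ethers.getContractFactory('Hasher')
      const hasher = await Hasher.deploy()
      return hasher.deployed()
    }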
src/index.js
@@ -58,7 +58,6 @@ async function getProof({ inputs, outputs, tree, extAmount, fee, recipient, relayer }) {
   const extDataHash = getExtDataHash(extData)
   let input = {
     root: oldRoot,
-    newRoot: tree.root(),
     inputNullifier: inputs.map((x) => x.getNullifier()),
     outputCommitment: outputs.map((x) => x.getCommitment()),
     publicAmount: BigNumber.from(extAmount).sub(fee).add(FIELD_SIZE).mod(FIELD_SIZE).toString(),
@@ -84,10 +83,8 @@ async function getProof({ inputs, outputs, tree, extAmount, fee, recipient, relayer }) {
   const args = {
     proof,
     root: toFixedHex(input.root),
-    newRoot: toFixedHex(input.newRoot),
     inputNullifiers: inputs.map((x) => toFixedHex(x.getNullifier())),
     outputCommitments: outputs.map((x) => toFixedHex(x.getCommitment())),
-    outPathIndices: toFixedHex(outputIndex >> outputBatchBits),
     publicAmount: toFixedHex(input.publicAmount),
     extDataHash: toFixedHex(extDataHash),
   }
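The slimmed-down args object now maps field-for-field onto the contract's Proof struct. The actual submission call is not part of this diff; as an illustrative sketch only, with tornadoPool and extData as assumed names:

    // Hypothetical call site: submit the proof args plus external data.
    await tornadoPool.transaction(args, extData)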
test/full.test.js
@@ -3,15 +3,13 @@ const { ethers, waffle } = hre
 const { loadFixture } = waffle
 const { expect } = require('chai')

-const { poseidonHash2 } = require('../src/utils')
+const { toFixedHex } = require('../src/utils')
 const Utxo = require('../src/utxo')

-const MERKLE_TREE_HEIGHT = 5
-const MerkleTree = require('fixed-merkle-tree')
-
 const { transaction, registerAndTransact } = require('../src/index')
 const { Keypair } = require('../src/keypair')

+const MERKLE_TREE_HEIGHT = 5
+
 describe('TornadoPool', function () {
   this.timeout(20000)
@@ -22,14 +20,19 @@ describe('TornadoPool', function () {
   }

   async function fixture() {
+    require('../scripts/compileHasher')
     const verifier2 = await deploy('Verifier2')
     const verifier16 = await deploy('Verifier16')
-    const tree = new MerkleTree(MERKLE_TREE_HEIGHT, [], { hashFunction: poseidonHash2 })
+    const hasher = await deploy('Hasher')

     /** @type {TornadoPool} */
-    const tornadoPool = await deploy('TornadoPool', verifier2.address, verifier16.address)
-    await tornadoPool.initialize(tree.root())
+    const tornadoPool = await deploy(
+      'TornadoPool',
+      verifier2.address,
+      verifier16.address,
+      MERKLE_TREE_HEIGHT,
+      hasher.address,
+    )
+    await tornadoPool.initialize()
     return { tornadoPool }
   }
@@ -48,7 +51,7 @@ describe('TornadoPool', function () {
     const TornadoPool = await ethers.getContractFactory('TornadoPool')
     /** @type {TornadoPool} */
     const tornadoPoolProxied = TornadoPool.attach(proxy.address)
-    await tornadoPoolProxied.initialize(await tornadoPool.currentRoot())
+    await tornadoPoolProxied.initialize()

     return { tornadoPool: tornadoPoolProxied, proxy, gov, messenger }
   }
test/tree.test.js (new file, 124 lines)
@@ -0,0 +1,124 @@
+const hre = require('hardhat')
+const { ethers, waffle } = hre
+const { loadFixture } = waffle
+const { expect } = require('chai')
+
+const { poseidonHash2, toFixedHex } = require('../src/utils')
+
+const MERKLE_TREE_HEIGHT = 5
+const MerkleTree = require('fixed-merkle-tree')
+
+describe('MerkleTreeWithHistory', function () {
+  this.timeout(20000)
+
+  async function deploy(contractName, ...args) {
+    const Factory = await ethers.getContractFactory(contractName)
+    const instance = await Factory.deploy(...args)
+    return instance.deployed()
+  }
+
+  function getNewTree() {
+    return new MerkleTree(MERKLE_TREE_HEIGHT, [], { hashFunction: poseidonHash2 })
+  }
+
+  async function fixture() {
+    require('../scripts/compileHasher')
+    const hasher = await deploy('Hasher')
+    const merkleTreeWithHistory = await deploy(
+      'MerkleTreeWithHistoryMock',
+      MERKLE_TREE_HEIGHT,
+      hasher.address,
+    )
+    await merkleTreeWithHistory.initialize()
+    return { hasher, merkleTreeWithHistory }
+  }
+
+  // it('should return cloned tree in fixture', async () => {
+  //   const { tree: tree1 } = await loadFixture(fixture)
+  //   tree1.insert(1)
+  //   const { tree: tree2 } = await loadFixture(fixture)
+  //   expect(tree1.root()).to.not.equal(tree2.root())
+  // })
+
+  describe('#constructor', () => {
+    it('should correctly hash 2 leaves', async () => {
+      const { hasher, merkleTreeWithHistory } = await loadFixture(fixture)
+      //console.log(hasher)
+      const hash0 = await merkleTreeWithHistory.hashLeftRight(toFixedHex(123), toFixedHex(456))
+      // const hash1 = await hasher.poseidon([123, 456])
+      const hash2 = poseidonHash2(123, 456)
+      expect(hash0).to.equal(hash2)
+    })
+
+    it('should initialize', async () => {
+      const { merkleTreeWithHistory } = await loadFixture(fixture)
+      const zeroValue = await merkleTreeWithHistory.ZERO_VALUE()
+      const firstSubtree = await merkleTreeWithHistory.filledSubtrees(0)
+      const firstZero = await merkleTreeWithHistory.zeros(0)
+      expect(firstSubtree).to.be.equal(zeroValue)
+      expect(firstZero).to.be.equal(zeroValue)
+    })
+
+    it('should have correct merkle root', async () => {
+      const { merkleTreeWithHistory } = await loadFixture(fixture)
+      const tree = getNewTree()
+      const contractRoot = await merkleTreeWithHistory.getLastRoot()
+      expect(tree.root()).to.equal(contractRoot)
+    })
+  })
+
+  describe('#insert', () => {
+    it('should insert', async () => {
+      const { merkleTreeWithHistory } = await loadFixture(fixture)
+      const tree = getNewTree()
+      await merkleTreeWithHistory.insert(toFixedHex(123), toFixedHex(456))
+      tree.bulkInsert([123, 456])
+      expect(tree.root()).to.be.equal(await merkleTreeWithHistory.getLastRoot())
+
+      await merkleTreeWithHistory.insert(toFixedHex(678), toFixedHex(876))
+      tree.bulkInsert([678, 876])
+      expect(tree.root()).to.be.equal(await merkleTreeWithHistory.getLastRoot())
+    })
+
+    it.skip('hasher gas', async () => {
+      const { hasher } = await loadFixture(fixture)
+      const gas = await hasher.estimateGas.poseidon([123, 456])
+      console.log('hasher gas', gas - 21000)
+    })
+  })
+
+  describe('#isKnownRoot', () => {
+    async function fixtureFilled() {
+      const { merkleTreeWithHistory, hasher } = await loadFixture(fixture)
+      await merkleTreeWithHistory.insert(toFixedHex(123), toFixedHex(456))
+      return { merkleTreeWithHistory, hasher }
+    }
+
+    it('should return last root', async () => {
+      const { merkleTreeWithHistory } = await fixtureFilled()
+      const tree = getNewTree()
+      tree.bulkInsert([123, 456])
+      expect(await merkleTreeWithHistory.isKnownRoot(tree.root())).to.equal(true)
+    })
+
+    it('should return older root', async () => {
+      const { merkleTreeWithHistory } = await fixtureFilled()
+      const tree = getNewTree()
+      tree.bulkInsert([123, 456])
+      await merkleTreeWithHistory.insert(toFixedHex(234), toFixedHex(432))
+      expect(await merkleTreeWithHistory.isKnownRoot(tree.root())).to.equal(true)
+    })
+
+    it('should fail on unknown root', async () => {
+      const { merkleTreeWithHistory } = await fixtureFilled()
+      const tree = getNewTree()
+      tree.bulkInsert([456, 654])
+      expect(await merkleTreeWithHistory.isKnownRoot(tree.root())).to.equal(false)
+    })
+
+    it('should not return uninitialized roots', async () => {
+      const { merkleTreeWithHistory } = await fixtureFilled()
+      expect(await merkleTreeWithHistory.isKnownRoot(toFixedHex(0))).to.equal(false)
+    })
+  })
+})