hardhat fixes

poma 2021-02-02 14:20:59 +03:00
parent d464eef3f6
commit 71efcb28a7
9 changed files with 137 additions and 529 deletions

View File

@@ -16,8 +16,8 @@ contract TornadoTrees is ITornadoTrees, EnsResolve {
address public tornadoProxy;
IVerifier public immutable treeUpdateVerifier;
// make sure CHUNK_TREE_HEIGHT has the same value in BatchTreeUpdate.circom and IVerifier.sol
uint256 public constant CHUNK_TREE_HEIGHT = 7;
// make sure CHUNK_TREE_HEIGHT has the same value in BatchTreeUpdate.circom
uint256 public constant CHUNK_TREE_HEIGHT = 2;
uint256 public constant CHUNK_SIZE = 2**CHUNK_TREE_HEIGHT;
uint256 public constant ITEM_SIZE = 32 + 20 + 4;
uint256 public constant BYTES_SIZE = 32 + 32 + 4 + CHUNK_SIZE * ITEM_SIZE;
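
For a sense of scale, here is the arithmetic behind these constants with the new test value, as a quick JavaScript sketch mirroring the Solidity definitions above:

const CHUNK_TREE_HEIGHT = 2
const CHUNK_SIZE = 2 ** CHUNK_TREE_HEIGHT // 4 leaves per batch (128 with the previous value of 7)
const ITEM_SIZE = 32 + 20 + 4 // event hash (32) + instance address (20) + block number (4) = 56 bytes
const BYTES_SIZE = 32 + 32 + 4 + CHUNK_SIZE * ITEM_SIZE // oldRoot + newRoot + pathIndices + items = 292 bytes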

View File

@@ -1,170 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
library Pairing {
uint256 constant PRIME_Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
struct G1Point {
uint256 X;
uint256 Y;
}
// Encoding of field elements is: X[0] * z + X[1]
struct G2Point {
uint256[2] X;
uint256[2] Y;
}
/*
* @return The negation of p, i.e. p.plus(p.negate()) should be zero
*/
function negate(G1Point memory p) internal pure returns (G1Point memory) {
// The prime q in the base field F_q for G1
if (p.X == 0 && p.Y == 0) {
return G1Point(0, 0);
} else {
return G1Point(p.X, PRIME_Q - (p.Y % PRIME_Q));
}
}
/*
* @return r the sum of two points of G1
*/
function plus(
G1Point memory p1,
G1Point memory p2
) internal view returns (G1Point memory r) {
uint256[4] memory input = [
p1.X, p1.Y,
p2.X, p2.Y
];
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas(), 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success, "pairing-add-failed");
}
/*
* @return r the product of a point on G1 and a scalar, i.e.
* p == p.scalarMul(1) and p.plus(p) == p.scalarMul(2) for all
* points p.
*/
function scalarMul(G1Point memory p, uint256 s) internal view returns (G1Point memory r) {
uint256[3] memory input = [p.X, p.Y, s];
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas(), 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success, "pairing-mul-failed");
}
/* @return The result of computing the pairing check
* e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
* For example,
* pairing([P1(), P1().negate()], [P2(), P2()]) should return true.
*/
function pairing(
G1Point memory a1,
G2Point memory a2,
G1Point memory b1,
G2Point memory b2,
G1Point memory c1,
G2Point memory c2,
G1Point memory d1,
G2Point memory d2
) internal view returns (bool) {
uint256[24] memory input = [
a1.X, a1.Y, a2.X[0], a2.X[1], a2.Y[0], a2.Y[1],
b1.X, b1.Y, b2.X[0], b2.X[1], b2.Y[0], b2.Y[1],
c1.X, c1.Y, c2.X[0], c2.X[1], c2.Y[0], c2.Y[1],
d1.X, d1.Y, d2.X[0], d2.X[1], d2.Y[0], d2.Y[1]
];
uint256[1] memory out;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas(), 2000), 8, input, mul(24, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success, "pairing-opcode-failed");
return out[0] != 0;
}
}
contract BatchTreeUpdateVerifier {
uint256 constant SNARK_SCALAR_FIELD = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
uint256 constant PRIME_Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
using Pairing for *;
struct VerifyingKey {
Pairing.G1Point alfa1;
Pairing.G2Point beta2;
Pairing.G2Point gamma2;
Pairing.G2Point delta2;
Pairing.G1Point[2] IC;
}
function verifyingKey() internal pure returns (VerifyingKey memory vk) {
vk.alfa1 = Pairing.G1Point(uint256(20475789791681002364587166738311620805815985969091106757478379420262430093495), uint256(3034180384279528157431123624668892018871098425968640214767822771352219138078));
vk.beta2 = Pairing.G2Point([uint256(347992840312110670849483472224503623225781749273259516677464742758581199694), uint256(16853081403278411985324640353650047676779142117029386935051386044282804346484)], [uint256(10461241566647602546027012417757263991485755060136522105605550609788790829933), uint256(16049761706815422591462572571571264938897676292217555774707799384732883004386)]);
vk.gamma2 = Pairing.G2Point([uint256(5535450215937949788522672716791294482208969162172756729752675877422249461391), uint256(4537903555000997751027892507073556632992848536024556182449526590439971414042)], [uint256(6688278057604431581483695896713912024597719708930089928002132340517626404891), uint256(15745439923152020754042431613052318298038129099865656040309120795605091105487)]);
vk.delta2 = Pairing.G2Point([uint256(10712491908603553476637447918495381165104059355722416702328240143919146641319), uint256(15855442659923189569787773688895011287546687523233653745264460947047886121140)], [uint256(18278088599243830423965796542892879791365910862597475788753708589843343437901), uint256(10765606859348375283724614934374540130725132299795942405716724739350245709734)]);
vk.IC[0] = Pairing.G1Point(uint256(18147360875100520747353841225428915644191762631193821400291387675910597374366), uint256(17222433096548585553756828362569506045947134360392537102794184064340219776032));
vk.IC[1] = Pairing.G1Point(uint256(3514632146136652297064638325657684436433185732623721288055192259268961814948), uint256(8363257337389338977321440370428118205387545635573906956020792115766452976369));
}
/*
* @returns Whether the proof is valid given the hardcoded verifying key
* above and the public inputs
*/
function verifyProof(
bytes memory proof,
uint256[1] memory input
) public view returns (bool) {
uint256[8] memory p = abi.decode(proof, (uint256[8]));
for (uint8 i = 0; i < p.length; i++) {
// Make sure that each element in the proof is less than the prime q
require(p[i] < PRIME_Q, "verifier-proof-element-gte-prime-q");
}
Pairing.G1Point memory proofA = Pairing.G1Point(p[0], p[1]);
Pairing.G2Point memory proofB = Pairing.G2Point([p[2], p[3]], [p[4], p[5]]);
Pairing.G1Point memory proofC = Pairing.G1Point(p[6], p[7]);
VerifyingKey memory vk = verifyingKey();
// Compute the linear combination vkX
Pairing.G1Point memory vkX = vk.IC[0];
for (uint256 i = 0; i < input.length; i++) {
// Make sure that every input is less than the snark scalar field
require(input[i] < SNARK_SCALAR_FIELD, "verifier-input-gte-snark-scalar-field");
vkX = Pairing.plus(vkX, Pairing.scalarMul(vk.IC[i + 1], input[i]));
}
return Pairing.pairing(
Pairing.negate(proofA),
proofB,
vk.alfa1,
vk.beta2,
vkX,
vk.gamma2,
proofC,
vk.delta2
);
}
}
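
For reference, the verifier deleted here (it is now generated into artifacts/circuits and symlinked below) implements the standard Groth16 check over BN254: with proof points A, B, C decoded from the proof bytes and the hardcoded key (alfa1, beta2, gamma2, delta2, IC), verifyProof accepts exactly when

e(-A, B) \cdot e(\alpha_1, \beta_2) \cdot e(vk_X, \gamma_2) \cdot e(C, \delta_2) = 1, \qquad vk_X = IC_0 + \mathrm{input}_0 \cdot IC_1

which is precisely the final pairing(negate(proofA), proofB, vk.alfa1, vk.beta2, vkX, vk.gamma2, proofC, vk.delta2) call.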

View File

@@ -0,0 +1 @@
../../artifacts/circuits/BatchTreeUpdateVerifier.sol

View File

@@ -1,10 +1,11 @@
#!/bin/bash -e
mkdir -p artifacts/circuits
if [ "$2" = "large" ]; then
npx circom -v -f -r build/circuits/$1.r1cs -c build/circuits/$1.cpp -s build/circuits/$1.sym circuits/$1.circom
npx circom -v -f -r artifacts/circuits/$1.r1cs -c artifacts/circuits/$1.cpp -s artifacts/circuits/$1.sym circuits/$1.circom
else
npx circom -v -r build/circuits/$1.r1cs -w build/circuits/$1.wasm -s build/circuits/$1.sym circuits/$1.circom
npx circom -v -r artifacts/circuits/$1.r1cs -w artifacts/circuits/$1.wasm -s artifacts/circuits/$1.sym circuits/$1.circom
fi
zkutil setup -c build/circuits/$1.r1cs -p build/circuits/$1.params
zkutil generate-verifier -p build/circuits/$1.params -v build/circuits/${1}Verifier.sol
sed -i.bak "s/contract Verifier/contract ${1}Verifier/g" build/circuits/${1}Verifier.sol
npx snarkjs info -r build/circuits/$1.r1cs
zkutil setup -c artifacts/circuits/$1.r1cs -p artifacts/circuits/$1.params
zkutil generate-verifier -p artifacts/circuits/$1.params -v artifacts/circuits/${1}Verifier.sol
sed -i.bak "s/contract Verifier/contract ${1}Verifier/g" artifacts/circuits/${1}Verifier.sol
npx snarkjs info -r artifacts/circuits/$1.r1cs
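# Example usage (assumed path scripts/buildCircuit.sh; the circuit name is the first argument):
#   ./scripts/buildCircuit.sh BatchTreeUpdate          (wasm witness generator)
#   ./scripts/buildCircuit.sh BatchTreeUpdate large    (C++ witness generator via circom -f -c)
# Either way the outputs land in artifacts/circuits/: $1.r1cs, $1.sym, $1.params,
# $1.wasm (or $1.cpp), plus ${1}Verifier.sol with the contract renamed by the sed call above.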

View File

@@ -1,11 +1,15 @@
const ethers = require('ethers')
const BigNumber = ethers.BigNumber
const {
bitsToNumber,
toFixedHex,
toBuffer,
poseidonHash,
poseidonHash2,
} = require('./utils')
const jsSHA = require('jssha')
const { toBN } = require('web3-utils')
const fs = require('fs')
const tmp = require('tmp-promise')
@@ -14,18 +18,18 @@ const exec = util.promisify(require('child_process').exec)
function hashInputs(input) {
const sha = new jsSHA('SHA-256', 'ARRAYBUFFER')
sha.update(toBN(input.oldRoot).toBuffer('be', 32))
sha.update(toBN(input.newRoot).toBuffer('be', 32))
sha.update(toBN(input.pathIndices).toBuffer('be', 4))
sha.update(toBuffer(input.oldRoot, 32))
sha.update(toBuffer(input.newRoot, 32))
sha.update(toBuffer(input.pathIndices, 4))
for (let i = 0; i < input.instances.length; i++) {
sha.update(toBN(input.hashes[i]).toBuffer('be', 32))
sha.update(toBN(input.instances[i]).toBuffer('be', 20))
sha.update(toBN(input.blocks[i]).toBuffer('be', 4))
sha.update(toBuffer(input.hashes[i], 32))
sha.update(toBuffer(input.instances[i], 20))
sha.update(toBuffer(input.blocks[i], 4))
}
const hash = sha.getHash('HEX')
const result = toBN(hash).mod(toBN('21888242871839275222246405745257275088548364400416034343698204186575808495617')).toString()
const hash = '0x' + sha.getHash('HEX')
const result = BigNumber.from(hash).mod(BigNumber.from('21888242871839275222246405745257275088548364400416034343698204186575808495617')).toString()
return result
}
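
hashInputs now packs the fields with the ethers-based toBuffer helper and reduces the sha256 digest into the BN254 scalar field, matching the BYTES_SIZE layout in TornadoTrees.sol. A minimal usage sketch with made-up values:

// hypothetical 2-event chunk; a real chunk has CHUNK_SIZE events
const argsHash = hashInputs({
  oldRoot: '0x01',
  newRoot: '0x02',
  pathIndices: 0,
  instances: ['0x1111000000000000000000000000000000001111', '0x2222000000000000000000000000000000002222'],
  hashes: ['0x03', '0x04'],
  blocks: [1, 2],
})
// => decimal string: sha256(oldRoot | newRoot | pathIndices | hash_i | instance_i | block_i) mod the field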
@@ -63,7 +67,7 @@ function batchTreeUpdate(tree, events) {
tree.bulkInsert(leaves)
const newRoot = tree.root().toString()
let { pathElements, pathIndices } = tree.path(tree.elements().length - 1)
pathElements = pathElements.slice(batchHeight).map(a => toBN(a).toString())
pathElements = pathElements.slice(batchHeight).map(a => BigNumber.from(a).toString())
pathIndices = bitsToNumber(pathIndices.slice(batchHeight)).toString()
const input = {
@@ -71,9 +75,9 @@ function batchTreeUpdate(tree, events) {
newRoot,
pathIndices,
pathElements,
instances: events.map((e) => toBN(e.instance).toString()),
hashes: events.map((e) => toBN(e.hash).toString()),
blocks: events.map((e) => toBN(e.block).toString()),
instances: events.map((e) => BigNumber.from(e.instance).toString()),
hashes: events.map((e) => BigNumber.from(e.hash).toString()),
blocks: events.map((e) => BigNumber.from(e.block).toString()),
}
input.argsHash = hashInputs(input)

View File

@@ -1,119 +1,21 @@
const crypto = require('crypto')
const Decimal = require('decimal.js')
const { bigInt } = require('snarkjs')
const { toBN, BN, soliditySha3 } = require('web3-utils')
const Web3 = require('web3')
const web3 = new Web3()
const { babyJub, pedersenHash, mimcsponge, poseidon } = require('circomlib')
const ethers = require('ethers')
const BigNumber = ethers.BigNumber
const { poseidon } = require('circomlib')
const RewardExtData = {
RewardExtData: {
relayer: 'address',
encryptedAccount: 'bytes',
},
}
const AccountUpdate = {
AccountUpdate: {
inputRoot: 'bytes32',
inputNullifierHash: 'bytes32',
outputRoot: 'bytes32',
outputPathIndices: 'uint256',
outputCommitment: 'bytes32',
},
}
const RewardArgs = {
RewardArgs: {
rate: 'uint256',
fee: 'uint256',
instance: 'address',
rewardNullifier: 'bytes32',
extDataHash: 'bytes32',
depositRoot: 'bytes32',
withdrawalRoot: 'bytes32',
extData: RewardExtData.RewardExtData,
account: AccountUpdate.AccountUpdate,
},
}
const WithdrawExtData = {
WithdrawExtData: {
fee: 'uint256',
recipient: 'address',
relayer: 'address',
encryptedAccount: 'bytes',
},
}
const pedersenHashBuffer = (buffer) => toBN(babyJub.unpackPoint(pedersenHash.hash(buffer))[0].toString())
const mimcHash = (items) => toBN(mimcsponge.multiHash(items.map((item) => bigInt(item))).toString())
const poseidonHash = (items) => toBN(poseidon(items).toString())
const poseidonHash = (items) => BigNumber.from(poseidon(items).toString())
const poseidonHash2 = (a, b) => poseidonHash([a, b])
/** Generate random number of specified byte length */
const randomBN = (nbytes = 31) => new BN(crypto.randomBytes(nbytes))
const randomBN = (nbytes = 31) => BigNumber.from(crypto.randomBytes(nbytes))
/** BigNumber to hex string of specified length */
const toFixedHex = (number, length = 32) =>
'0x' +
(number instanceof Buffer ? number.toString('hex') : toBN(number).toString(16)).padStart(length * 2, '0')
(number instanceof Buffer ? number.toString('hex') : BigNumber.from(number).toHexString().slice(2)).padStart(length * 2, '0')
function getExtRewardArgsHash({ relayer, encryptedAccount }) {
const encodedData = web3.eth.abi.encodeParameters(
[RewardExtData],
[{ relayer: toFixedHex(relayer, 20), encryptedAccount }],
)
const hash = soliditySha3({ t: 'bytes', v: encodedData })
return '0x00' + hash.slice(4) // cut first byte to make it 31 byte long to fit the snark field
}
function getExtWithdrawArgsHash({ fee, recipient, relayer, encryptedAccount }) {
const encodedData = web3.eth.abi.encodeParameters(
[WithdrawExtData],
[
{
fee: toFixedHex(fee, 32),
recipient: toFixedHex(recipient, 20),
relayer: toFixedHex(relayer, 20),
encryptedAccount,
},
],
)
const hash = soliditySha3({ t: 'bytes', v: encodedData })
return '0x00' + hash.slice(4) // cut first byte to make it 31 byte long to fit the snark field
}
function packEncryptedMessage(encryptedMessage) {
const nonceBuf = Buffer.from(encryptedMessage.nonce, 'base64')
const ephemPublicKeyBuf = Buffer.from(encryptedMessage.ephemPublicKey, 'base64')
const ciphertextBuf = Buffer.from(encryptedMessage.ciphertext, 'base64')
const messageBuff = Buffer.concat([
Buffer.alloc(24 - nonceBuf.length),
nonceBuf,
Buffer.alloc(32 - ephemPublicKeyBuf.length),
ephemPublicKeyBuf,
ciphertextBuf,
])
return '0x' + messageBuff.toString('hex')
}
function unpackEncryptedMessage(encryptedMessage) {
if (encryptedMessage.slice(0, 2) === '0x') {
encryptedMessage = encryptedMessage.slice(2)
}
const messageBuff = Buffer.from(encryptedMessage, 'hex')
const nonceBuf = messageBuff.slice(0, 24)
const ephemPublicKeyBuf = messageBuff.slice(24, 56)
const ciphertextBuf = messageBuff.slice(56)
return {
version: 'x25519-xsalsa20-poly1305',
nonce: nonceBuf.toString('base64'),
ephemPublicKey: ephemPublicKeyBuf.toString('base64'),
ciphertext: ciphertextBuf.toString('base64'),
}
}
const toBuffer = (value, length) => Buffer.from(BigNumber.from(value).toHexString().slice(2).padStart(length * 2, '0'), 'hex')
function bitsToNumber(bits) {
let result = 0
@@ -123,43 +25,11 @@ function bitsToNumber(bits) {
return result
}
// a = floor(10**18 * e^(-0.0000000001 * amount))
// yield = BalBefore - (BalBefore * a)/10**18
function tornadoFormula({ balance, amount, poolWeight = 1e10 }) {
const decimals = new Decimal(10 ** 18)
balance = new Decimal(balance.toString())
amount = new Decimal(amount.toString())
poolWeight = new Decimal(poolWeight.toString())
const power = amount.div(poolWeight).negated()
const exponent = Decimal.exp(power).mul(decimals)
const newBalance = balance.mul(exponent).div(decimals)
return toBN(balance.sub(newBalance).toFixed(0))
}
function reverseTornadoFormula({ balance, tokens, poolWeight = 1e10 }) {
balance = new Decimal(balance.toString())
tokens = new Decimal(tokens.toString())
poolWeight = new Decimal(poolWeight.toString())
return toBN(poolWeight.times(Decimal.ln(balance.div(balance.sub(tokens)))).toFixed(0))
}
module.exports = {
randomBN,
pedersenHashBuffer,
bitsToNumber,
getExtRewardArgsHash,
getExtWithdrawArgsHash,
packEncryptedMessage,
unpackEncryptedMessage,
toFixedHex,
mimcHash,
toBuffer,
poseidonHash,
poseidonHash2,
tornadoFormula,
reverseTornadoFormula,
RewardArgs,
RewardExtData,
AccountUpdate,
}
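
A few quick examples of the ethers-based helpers that remain exported above (illustrative values):

toFixedHex(5, 2) // '0x0005', left-padded to the requested byte length
toFixedHex(Buffer.from('ff', 'hex'), 2) // '0x00ff', Buffer inputs are hex-encoded directly
toBuffer(258, 4) // <Buffer 00 00 01 02>, big-endian bytes as consumed by hashInputs
randomBN() // random BigNumber built from 31 random bytes, so it fits the snark field
poseidonHash2(1, 2) // BigNumber Poseidon hash of the pair, used as the trees' hashFunction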

View File

@@ -1,39 +1,32 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { takeSnapshot, revertSnapshot } = require('../scripts/ganacheHelper')
const Controller = require('../src/controller')
const { toBN } = require('web3-utils')
const Pack = artifacts.require('Pack')
const jsSHA = require('jssha')
const { poseidonHash2 } = require('../src/utils')
const { expect } = require("chai")
const MerkleTree = require('fixed-merkle-tree')
const jsSHA = require('jssha')
const { poseidonHash2 } = require('../src/utils')
const { batchTreeUpdate, prove } = require('../src/controller')
const instances = [
'0xc6325fa78E0764993Bf2997116A3771bCbcb3fa9',
'0xb70738422D0f9d1225300eE0Fc67e7392095567d',
'0xA675B536203a123B0214cdf1EBb1298F440dA19A',
'0xFA1835cf197C3281Dc993a63bb160026dAC98bF3',
]
const hashes = [
'0x6f44cd7458bf24f65851fa8097712e3a8d9a6f3e387c501b285338308a74b8f3',
'0xafd3103939b7b0cd7a0ad1ddac57dd13af7f2825a21b47ae995b5bb0f767a106',
'0x57f7b90a3cb4ea6860e6dd5fa44ac4f53ebe6ae3948af577a01ef51738313246'
]
const levels = 20
const CHUNK_TREE_HEIGHT = 7
contract.skip('Pack', (accounts) => {
let pack
let snapshotId
const instances = [
'0xc6325fa78E0764993Bf2997116A3771bCbcb3fa9',
'0xb70738422D0f9d1225300eE0Fc67e7392095567d',
'0xA675B536203a123B0214cdf1EBb1298F440dA19A',
'0xFA1835cf197C3281Dc993a63bb160026dAC98bF3',
]
const hashes = [
'0x6f44cd7458bf24f65851fa8097712e3a8d9a6f3e387c501b285338308a74b8f3',
'0xafd3103939b7b0cd7a0ad1ddac57dd13af7f2825a21b47ae995b5bb0f767a106',
'0x57f7b90a3cb4ea6860e6dd5fa44ac4f53ebe6ae3948af577a01ef51738313246'
]
const notes = []
before(async () => {
const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
pack = await Pack.new()
describe.skip('Pack', () => {
it('should work', async () => {
const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const Pack = await ethers.getContractFactory("Pack")
const pack = await Pack.deploy()
const notes = []
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
notes[i] = {
instance: instances[i % instances.length],
@@ -41,37 +34,14 @@ contract.skip('Pack', (accounts) => {
block: 1 + i,
}
}
const receipt = await pack.pack2(notes.map(a => a.hash), notes.map(a => a.instance), notes.map(a => a.block))
const receipt2 = await receipt.wait()
snapshotId = await takeSnapshot()
})
describe('#pack', () => {
it('gastest', async () => {
const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const receipt = await pack.pack2(notes.map(a => a.hash), notes.map(a => a.instance), notes.map(a => a.block), { gas: 6e6 })
console.log('total', receipt.receipt.gasUsed)
const sha = new jsSHA('SHA-256', 'ARRAYBUFFER')
for (let i = 0; i < notes.length; i++) {
sha.update(toBN(notes[i].hash).toBuffer('be', 32))
sha.update(toBN(notes[i].instance).toBuffer('be', 20))
sha.update(toBN(notes[i].block).toBuffer('be', 4))
}
const hash = sha.getHash('HEX')
const solHash = await pack.hash()
solHash.should.be.equal('0x' + hash)
console.log('batch size', notes.length)
console.log('events', (await pack.gas1()).toString())
console.log('hash', (await pack.gas2()).toString())
console.log('bytes',(await pack.gas3()).toString())
console.log('calldata', toBN(6e6).sub(await pack.gas4()).toString())
})
})
afterEach(async () => {
await revertSnapshot(snapshotId.result)
// eslint-disable-next-line require-atomic-updates
snapshotId = await takeSnapshot()
console.log(`total ${receipt2.gasUsed}`)
console.log(`batch size ${notes.length}`)
console.log(`events ${await pack.gas1()}`)
console.log(`hash ${await pack.gas2()}`)
console.log(`bytes ${await pack.gas3()}`)
console.log(`calldata ${receipt.gasLimit.sub(await pack.gas4())}`)
})
})

View File

@@ -1,14 +0,0 @@
const { expect } = require("chai");
describe("Greeter", function() {
it("Should return the new greeting once it's changed", async function() {
const Greeter = await ethers.getContractFactory("Greeter");
const greeter = await Greeter.deploy("Hello, world!");
await greeter.deployed();
expect(await greeter.greet()).to.equal("Hello, world!");
await greeter.setGreeting("Hola, mundo!");
expect(await greeter.greet()).to.equal("Hola, mundo!");
});
});

View File

@@ -1,12 +1,12 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { expect } = require("chai")
const MerkleTree = require('fixed-merkle-tree')
const { poseidonHash2, randomBN } = require('../src/utils')
const { batchTreeUpdate, prove } = require('../src/controller')
const levels = 20
const CHUNK_TREE_HEIGHT = 2
contract('Snark', () => {
describe('Snark', () => {
it('should work', async () => {
const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const events = []
@@ -18,6 +18,6 @@ contract('Snark', () => {
})
}
const data = await batchTreeUpdate(tree, events)
const proof = await prove(data, './build/circuits/BatchTreeUpdate')
const proof = await prove(data, './artifacts/circuits/BatchTreeUpdate')
})
})

View File

@@ -1,95 +1,59 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { takeSnapshot, revertSnapshot } = require('../scripts/ganacheHelper')
const controller = require('../src/controller')
const TornadoTrees = artifacts.require('TornadoTreesMock')
const BatchTreeUpdateVerifier = artifacts.require('BatchTreeUpdateVerifier')
const { toBN } = require('web3-utils')
const { expect } = require("chai")
const { toFixedHex, poseidonHash2, randomBN } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')
async function registerDeposit(note, tornadoTrees, from) {
await tornadoTrees.setBlockNumber(note.depositBlock)
await tornadoTrees.registerDeposit(note.instance, toFixedHex(note.commitment), { from })
return {
instance: note.instance,
hash: toFixedHex(note.commitment),
block: toFixedHex(note.depositBlock),
}
}
async function registerWithdrawal(note, tornadoTrees, from) {
await tornadoTrees.setBlockNumber(note.withdrawalBlock)
await tornadoTrees.registerWithdrawal(note.instance, toFixedHex(note.nullifierHash), { from })
return {
instance: note.instance,
hash: toFixedHex(note.nullifierHash),
block: toFixedHex(note.withdrawalBlock),
}
}
const controller = require('../src/controller')
async function register(note, tornadoTrees, from) {
await tornadoTrees.register(
await tornadoTrees.connect(from).register(
note.instance,
toFixedHex(note.commitment),
toFixedHex(note.nullifierHash),
note.depositBlock,
note.withdrawalBlock,
{
from,
},
)
return {
instance: note.instance,
hash: toFixedHex(note.nullifierHash),
block: toFixedHex(note.withdrawalBlock),
}
}
const toEns = (addr) => toFixedHex(addr, 20).padEnd(66, '0')
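// e.g. toEns('0x1111000000000000000000000000000000001111')
//   === '0x1111000000000000000000000000000000001111000000000000000000000000'
// i.e. the 20-byte address right-padded with zeros to bytes32; presumably the EnsResolve base
// of TornadoTreesMock resolves these placeholder nodes straight back to the embedded address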
const levels = 20
const CHUNK_TREE_HEIGHT = 2
contract('TornadoTrees', (accounts) => {
let tornadoTrees
const instances = [
'0x1111000000000000000000000000000000001111',
'0x2222000000000000000000000000000000002222',
'0x3333000000000000000000000000000000003333',
'0x4444000000000000000000000000000000004444',
]
const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']
describe("TornadoTrees", function() {
let tree
let operator
let tornadoProxy
let verifier
// let controller
let snapshotId
let tornadoProxy = accounts[0]
let operator = accounts[0]
let tornadoTrees
let notes
let events
const instances = [
'0x1111000000000000000000000000000000001111',
'0x2222000000000000000000000000000000002222',
'0x3333000000000000000000000000000000003333',
'0x4444000000000000000000000000000000004444',
]
beforeEach(async function() {
tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
;[operator, tornadoProxy] = await ethers.getSigners()
const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']
const BatchTreeUpdateVerifier = await ethers.getContractFactory("BatchTreeUpdateVerifier")
verifier = await BatchTreeUpdateVerifier.deploy()
const notes = []
before(async () => {
const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
verifier = await BatchTreeUpdateVerifier.new()
tornadoTrees = await TornadoTrees.new(
operator,
tornadoProxy,
verifier.address,
toFixedHex(emptyTree.root()),
toFixedHex(emptyTree.root()),
const TornadoTrees = await ethers.getContractFactory("TornadoTreesMock")
tornadoTrees = await TornadoTrees.deploy(
toEns(operator.address),
toEns(tornadoProxy.address),
toEns(verifier.address),
toFixedHex(tree.root()),
toFixedHex(tree.root()),
)
// controller = new Controller({
// contract: '',
// tornadoTreesContract: tornadoTrees,
// merkleTreeHeight: levels,
// provingKeys,
// })
// await controller.init()
notes = []
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
// console.log('i', i)
notes[i] = {
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
@@ -100,60 +64,42 @@ contract('TornadoTrees', (accounts) => {
await register(notes[i], tornadoTrees, tornadoProxy)
}
snapshotId = await takeSnapshot()
events = notes.map((note) => ({
hash: toFixedHex(note.commitment),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.depositBlock, 4),
}))
})
describe('#updateDepositTree', () => {
it('should check hash', async () => {
const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const events = notes.map((note) => ({
hash: toFixedHex(note.commitment),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.depositBlock, 4),
}))
const data = await controller.batchTreeUpdate(emptyTree, events)
const solHash = await tornadoTrees.updateDepositTreeMock(
toFixedHex(data.oldRoot),
toFixedHex(data.newRoot),
toFixedHex(data.pathIndices, 4),
events,
)
toBN(data.argsHash).should.be.eq.BN(solHash)
})
it('should prove snark', async () => {
const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const events = notes.map((note) => ({
hash: toFixedHex(note.commitment),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.depositBlock, 4),
}))
const data = await controller.batchTreeUpdate(emptyTree, events)
const proof = await controller.prove(data, './build/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(
proof,
toFixedHex(data.argsHash),
toFixedHex(data.oldRoot),
toFixedHex(data.newRoot),
toFixedHex(data.pathIndices, 4),
events,
)
const updatedRoot = await tornadoTrees.depositRoot()
updatedRoot.should.be.eq.BN(toBN(toFixedHex(data.newRoot)))
})
it('should work for non-empty tree')
it('should reject for partially filled tree')
it('should reject for outdated deposit root')
it('should reject for incorrect insert index')
it('should reject for overflows of newRoot')
it('should reject for invalid sha256 args')
it("Should calculate hash", async function() {
const data = await controller.batchTreeUpdate(tree, events)
const solHash = await tornadoTrees.updateDepositTreeMock(
toFixedHex(data.oldRoot),
toFixedHex(data.newRoot),
toFixedHex(data.pathIndices, 4),
events,
)
expect(solHash).to.be.equal(data.argsHash)
})
afterEach(async () => {
await revertSnapshot(snapshotId.result)
// eslint-disable-next-line require-atomic-updates
snapshotId = await takeSnapshot()
it("Should calculate hash", async function() {
const data = await controller.batchTreeUpdate(tree, events)
const proof = await controller.prove(data, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(
proof,
toFixedHex(data.argsHash),
toFixedHex(data.oldRoot),
toFixedHex(data.newRoot),
toFixedHex(data.pathIndices, 4),
events,
)
expect(await tornadoTrees.depositRoot()).to.be.equal(tree.root())
})
it('should work for non-empty tree')
it('should reject for partially filled tree')
it('should reject for outdated deposit root')
it('should reject for incorrect insert index')
it('should reject for overflows of newRoot')
it('should reject for invalid sha256 args')
})