This commit is contained in:
poma 2021-06-06 20:31:32 +03:00
parent 4122e1e90c
commit 516d599502
No known key found for this signature in database
GPG Key ID: BA20CB01FE165657
20 changed files with 993 additions and 616 deletions

2
.gitignore vendored
View File

@ -1,2 +1,4 @@
node_modules node_modules
build build
cache
artifacts

View File

@ -1,4 +1,4 @@
include "../node_modules/circomlib/circuits/mimcsponge.circom"; include "../node_modules/circomlib/circuits/poseidon.circom";
// Computes MiMC([left, right]) // Computes MiMC([left, right])
template HashLeftRight() { template HashLeftRight() {
@ -6,11 +6,10 @@ template HashLeftRight() {
signal input right; signal input right;
signal output hash; signal output hash;
component hasher = MiMCSponge(2, 1); component hasher = Poseidon(2);
hasher.ins[0] <== left; hasher.inputs[0] <== left;
hasher.ins[1] <== right; hasher.inputs[1] <== right;
hasher.k <== 0; hash <== hasher.out;
hash <== hasher.outs[0];
} }
// if s == 0 returns [in[0], in[1]] // if s == 0 returns [in[0], in[1]]

View File

@ -121,4 +121,4 @@ template Transaction(levels, zeroLeaf) {
} }
} }
component main = Transaction(5, 16923532097304556005972200564242292693309333953544141029519619077135960040221); component main = Transaction(5, 11850551329423159860688778991827824730037759162201783566284850822760196767874);

View File

@ -1,17 +1,15 @@
include "../node_modules/circomlib/circuits/pointbits.circom"; include "../node_modules/circomlib/circuits/pointbits.circom";
include "../node_modules/circomlib/circuits/compconstant.circom"; include "../node_modules/circomlib/circuits/compconstant.circom";
include "../node_modules/circomlib/circuits/mimcsponge.circom"; include "../node_modules/circomlib/circuits/poseidon.circom";
template Keypair() { template Keypair() {
signal input privateKey; signal input privateKey;
signal output publicKey; signal output publicKey;
component hasher = MiMCSponge(1, 1); component hasher = Poseidon(1);
hasher.ins[0] <== privateKey; hasher.inputs[0] <== privateKey;
hasher.k <== 0; publicKey <== hasher.out;
publicKey <== hasher.outs[0];
} }
template TransactionHasher() { template TransactionHasher() {
@ -21,13 +19,11 @@ template TransactionHasher() {
signal output commitment; signal output commitment;
component hasher = MiMCSponge(3, 1); component hasher = Poseidon(3);
hasher.ins[0] <== amount; hasher.inputs[0] <== amount;
hasher.ins[1] <== blinding; hasher.inputs[1] <== blinding;
hasher.ins[2] <== publicKey; hasher.inputs[2] <== publicKey;
hasher.k <== 0; commitment <== hasher.out;
commitment <== hasher.outs[0];
} }
template NullifierHasher() { template NullifierHasher() {
@ -37,11 +33,9 @@ template NullifierHasher() {
signal output nullifier; signal output nullifier;
component hasher = MiMCSponge(3, 1); component hasher = Poseidon(3);
hasher.ins[0] <== commitment; hasher.inputs[0] <== commitment;
hasher.ins[1] <== merklePath; hasher.inputs[1] <== merklePath;
hasher.ins[2] <== privateKey; hasher.inputs[2] <== privateKey;
hasher.k <== 0; nullifier <== hasher.out;
nullifier <== hasher.outs[0];
} }

View File

@ -1,3 +1,4 @@
// SPDX-License-Identifier: MIT
// https://tornado.cash // https://tornado.cash
/* /*
* d888888P dP a88888b. dP * d888888P dP a88888b. dP
@ -9,12 +10,12 @@
* ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo * ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
*/ */
pragma solidity ^0.5.8; pragma solidity ^0.6.0;
import "@openzeppelin/contracts/utils/ReentrancyGuard.sol"; // todo: maybe remove? import "@openzeppelin/contracts/utils/ReentrancyGuard.sol"; // todo: maybe remove?
contract IVerifier { interface IVerifier {
function verifyProof(bytes memory _proof, uint256[10] memory _input) public returns(bool); function verifyProof(bytes memory _proof, uint256[10] memory _input) external returns(bool);
} }
contract TornadoPool is ReentrancyGuard { contract TornadoPool is ReentrancyGuard {
@ -78,11 +79,11 @@ contract TornadoPool is ReentrancyGuard {
require(msg.value == uint256(extAmount), "Incorrect amount of ETH sent on deposit"); require(msg.value == uint256(extAmount), "Incorrect amount of ETH sent on deposit");
} else { } else {
require(msg.value == 0, "Sent ETH amount should be 0 for withdrawal"); require(msg.value == 0, "Sent ETH amount should be 0 for withdrawal");
transfer(_recipient, uint256(-extAmount)); _recipient.transfer(uint256(-extAmount));
} }
if (_fee > 0) { if (_fee > 0) {
transfer(_relayer, _fee); _recipient.transfer(_fee);
} }
// todo enforce currentCommitmentIndex value in snark // todo enforce currentCommitmentIndex value in snark
@ -105,23 +106,18 @@ contract TornadoPool is ReentrancyGuard {
} }
} }
function transfer(address payable to, uint256 amount) internal {
(bool success, ) = to.call.value(amount)("");
require(success, "payment did not go through");
}
/** @dev whether a note is already spent */ /** @dev whether a note is already spent */
function isSpent(bytes32 _nullifierHash) public view returns(bool) { function isSpent(bytes32 _nullifierHash) public view returns(bool) {
return nullifierHashes[_nullifierHash]; return nullifierHashes[_nullifierHash];
} }
/** @dev whether an array of notes is already spent */ // /** @dev whether an array of notes is already spent */
function isSpentArray(bytes32[] calldata _nullifierHashes) external view returns(bool[] memory spent) { // function isSpentArray(bytes32[] calldata _nullifierHashes) external view returns(bool[] memory spent) {
spent = new bool[](_nullifierHashes.length); // spent = new bool[](_nullifierHashes.length);
for(uint i = 0; i < _nullifierHashes.length; i++) { // for(uint i = 0; i < _nullifierHashes.length; i++) {
if (isSpent(_nullifierHashes[i])) { // if (isSpent(_nullifierHashes[i])) {
spent[i] = true; // spent[i] = true;
} // }
} // }
} // }
} }

View File

@ -1 +1 @@
../build/circuits/Verifier.sol ../artifacts/circuits/Verifier.sol

View File

@ -4,7 +4,7 @@ require('dotenv').config()
const config = { const config = {
solidity: { solidity: {
version: '0.5.10', version: '0.6.12',
settings: { settings: {
optimizer: { optimizer: {
enabled: true, enabled: true,
@ -13,22 +13,14 @@ const config = {
}, },
}, },
networks: { networks: {
hardhat: { // goerli: {
gasPrice: 0, // url: process.env.ETH_RPC,
chainId: 1, // accounts: process.env.PRIVATE_KEY
forking: { // ? [process.env.PRIVATE_KEY]
url: process.env.ETH_RPC, // : {
blockNumber: 12197930, // mnemonic: 'test test test test test test test test test test test junk',
}, // },
}, // },
goerli: {
url: process.env.ETH_RPC,
accounts: process.env.PRIVATE_KEY
? [process.env.PRIVATE_KEY]
: {
mnemonic: 'test test test test test test test test test test test junk',
},
},
}, },
mocha: { mocha: {
timeout: 600000000, timeout: 600000000,

View File

@ -1,201 +0,0 @@
const jsStorage = require('./storage')
const hasherImpl = require('./mimc')
/**
 * Append-only Merkle tree persisted through a pluggable key/value storage
 * and a pluggable hasher. Levels run from 0 (leaves) up to n_levels (root).
 * Node values are kept as decimal strings.
 */
class MerkleTree {
  /**
   * @param n_levels depth of the tree (number of levels below the root)
   * @param defaultElements optional array of initial leaves to bulk-load
   * @param prefix namespace prepended to every storage key
   * @param storage key/value backend (defaults to in-memory jsStorage)
   * @param hasher object exposing hash(level, left, right)
   */
  constructor(n_levels, defaultElements, prefix, storage, hasher) {
    this.prefix = prefix
    this.storage = storage || new jsStorage()
    this.hasher = hasher || new hasherImpl()
    this.n_levels = n_levels
    this.zero_values = []
    this.totalElements = 0

    // Precompute the "empty subtree" hash for every level.
    let current_zero_value = '21663839004416932945382355908790599225266501822907911457504978515578255421292'
    this.zero_values.push(current_zero_value)
    for (let i = 0; i < n_levels; i++) {
      current_zero_value = this.hasher.hash(i, current_zero_value, current_zero_value)
      this.zero_values.push(current_zero_value.toString())
    }

    if (defaultElements) {
      // Bulk-load: write all leaves, then build each level bottom-up.
      let level = 0
      this.totalElements = defaultElements.length
      defaultElements.forEach((element, i) => {
        this.storage.put(MerkleTree.index_to_key(prefix, level, i), element)
      })
      level++
      let numberOfElementsInLevel = Math.ceil(defaultElements.length / 2)
      for (level; level <= this.n_levels; level++) {
        for (let i = 0; i < numberOfElementsInLevel; i++) {
          const leftKey = MerkleTree.index_to_key(prefix, level - 1, 2 * i)
          const rightKey = MerkleTree.index_to_key(prefix, level - 1, 2 * i + 1)
          const left = this.storage.get(leftKey)
          // A missing right sibling is an empty subtree of the level below.
          const right = this.storage.get_or_element(rightKey, this.zero_values[level - 1])
          const subRoot = this.hasher.hash(null, left, right)
          this.storage.put(MerkleTree.index_to_key(prefix, level, i), subRoot)
        }
        numberOfElementsInLevel = Math.ceil(numberOfElementsInLevel / 2)
      }
    }
  }

  /** Storage key for the node at (level, index). */
  static index_to_key(prefix, level, index) {
    const key = `${prefix}_tree_${level}_${index}`
    return key
  }

  /** Current root of the tree (zero-tree root when empty). */
  async root() {
    let root = await this.storage.get_or_element(
      MerkleTree.index_to_key(this.prefix, this.n_levels, 0),
      this.zero_values[this.n_levels],
    )
    return root
  }

  /**
   * Merkle proof for the leaf at `index`.
   * @returns { root, path_elements, path_index, element }
   */
  async path(index) {
    // Collects sibling hashes and left/right bits along the leaf->root walk.
    class PathTraverser {
      constructor(prefix, storage, zero_values) {
        this.prefix = prefix
        this.storage = storage
        this.zero_values = zero_values
        this.path_elements = []
        this.path_index = []
      }
      async handle_index(level, element_index, sibling_index) {
        const sibling = await this.storage.get_or_element(
          MerkleTree.index_to_key(this.prefix, level, sibling_index),
          this.zero_values[level],
        )
        this.path_elements.push(sibling)
        this.path_index.push(element_index % 2)
      }
    }
    index = Number(index)
    let traverser = new PathTraverser(this.prefix, this.storage, this.zero_values)
    const root = await this.storage.get_or_element(
      MerkleTree.index_to_key(this.prefix, this.n_levels, 0),
      this.zero_values[this.n_levels],
    )
    const element = await this.storage.get_or_element(
      MerkleTree.index_to_key(this.prefix, 0, index),
      this.zero_values[0],
    )
    await this.traverse(index, traverser)
    return {
      root,
      path_elements: traverser.path_elements,
      path_index: traverser.path_index,
      element,
    }
  }

  /**
   * Write `element` at leaf `index` and rehash the path up to the root.
   * @param insert pass true only when appending a brand-new leaf
   */
  async update(index, element, insert = false) {
    if (!insert && index >= this.totalElements) {
      throw Error('Use insert method for new elements.')
    } else if (insert && index < this.totalElements) {
      throw Error('Use update method for existing elements.')
    }
    try {
      // Recomputes every node on the leaf->root path, buffering the writes
      // so they are committed in a single batch at the end.
      class UpdateTraverser {
        constructor(prefix, storage, hasher, element, zero_values) {
          this.prefix = prefix
          this.current_element = element
          this.zero_values = zero_values
          this.storage = storage
          this.hasher = hasher
          this.key_values_to_put = []
        }
        async handle_index(level, element_index, sibling_index) {
          if (level == 0) {
            // Remember what the leaf held before the update.
            this.original_element = await this.storage.get_or_element(
              MerkleTree.index_to_key(this.prefix, level, element_index),
              this.zero_values[level],
            )
          }
          const sibling = await this.storage.get_or_element(
            MerkleTree.index_to_key(this.prefix, level, sibling_index),
            this.zero_values[level],
          )
          let left, right
          if (element_index % 2 == 0) {
            left = this.current_element
            right = sibling
          } else {
            left = sibling
            right = this.current_element
          }
          this.key_values_to_put.push({
            key: MerkleTree.index_to_key(this.prefix, level, element_index),
            value: this.current_element,
          })
          this.current_element = this.hasher.hash(level, left, right)
        }
      }
      let traverser = new UpdateTraverser(
        this.prefix,
        this.storage,
        this.hasher,
        element,
        this.zero_values
      )
      await this.traverse(index, traverser)
      traverser.key_values_to_put.push({
        key: MerkleTree.index_to_key(this.prefix, this.n_levels, 0),
        value: traverser.current_element,
      })
      await this.storage.put_batch(traverser.key_values_to_put)
    } catch(e) {
      // NOTE(review): errors are logged and swallowed here, so a failed
      // update leaves the tree (and insert()'s counter) silently stale.
      console.error(e)
    }
  }

  /** Append `element` as the next leaf. */
  async insert(element) {
    const index = this.totalElements
    await this.update(index, element, true)
    this.totalElements++
  }

  /** Append two leaves. (todo: could be made more optimal with one pass) */
  async insertPair(first, second) {
    // Fixed: the original called the bare `insert(...)`, which is not
    // defined in this scope and threw a ReferenceError at runtime.
    await this.insert(first)
    await this.insert(second)
  }

  /** Walk from leaf `index` to the root, calling handler.handle_index per level. */
  async traverse(index, handler) {
    let current_index = index
    for (let i = 0; i < this.n_levels; i++) {
      let sibling_index = current_index
      if (current_index % 2 == 0) {
        sibling_index += 1
      } else {
        sibling_index -= 1
      }
      await handler.handle_index(i, current_index, sibling_index)
      current_index = Math.floor(current_index / 2)
    }
  }

  /** Linear scan for `element` among the leaves; returns its index or false. */
  getIndexByElement(element) {
    for (let i = this.totalElements - 1; i >= 0; i--) {
      const elementFromTree = this.storage.get(MerkleTree.index_to_key(this.prefix, 0, i))
      if (elementFromTree === element) {
        return i
      }
    }
    return false
  }
}
module.exports = MerkleTree

View File

@ -1,17 +0,0 @@
const circomlib = require('circomlib')
const mimcsponge = circomlib.mimcsponge
const snarkjs = require('snarkjs')
const bigInt = snarkjs.bigInt
/**
 * Hasher adapter over circomlib's MiMC sponge; results are returned as
 * decimal strings.
 */
class MimcSpongeHasher {
  /** Hash a left/right pair; the `level` argument is accepted but unused. */
  hash(level, left, right) {
    return this.hashArray([left, right])
  }

  /** Hash an arbitrary list of items with the MiMC sponge multiHash. */
  hashArray(items) {
    return mimcsponge.multiHash(items.map((item) => bigInt(item))).toString()
  }
}
module.exports = MimcSpongeHasher

View File

@ -1,39 +0,0 @@
/**
 * Minimal in-memory key/value store used as the default Merkle tree backend.
 */
class JsStorage {
  constructor() {
    // Plain object serving as the backing dictionary.
    this.db = {}
  }

  /** Return the stored value for `key`, or undefined when absent. */
  get(key) {
    return this.db[key]
  }

  /** Return the stored value for `key`, falling back to `defaultElement`. */
  get_or_element(key, defaultElement) {
    const stored = this.db[key]
    return stored === undefined ? defaultElement : stored
  }

  /** Store `value` under `key`; both must be defined. */
  put(key, value) {
    if (key === undefined || value === undefined) {
      throw Error('key or value is undefined')
    }
    this.db[key] = value
  }

  /** Remove `key` from the store (no-op when absent). */
  del(key) {
    delete this.db[key]
  }

  /** Store every {key, value} pair in `key_values`. */
  put_batch(key_values) {
    for (const { key, value } of key_values) {
      this.db[key] = value
    }
  }
}
module.exports = JsStorage

View File

@ -1,6 +0,0 @@
/* global artifacts */
// Truffle migration: deploys the snark Verifier contract.
const Verifier = artifacts.require('Verifier')

// Truffle calls this with its deployer; queuing deploy(Verifier) schedules
// the contract deployment for this migration step.
module.exports = function(deployer) {
  deployer.deploy(Verifier)
}

View File

@ -1,19 +0,0 @@
/* global artifacts */
// Truffle migration: deploys TornadoPool wired to the previously deployed
// Verifier and initialised with the root of an empty Merkle tree.
const Verifier = artifacts.require('Verifier')
const TornadoPool = artifacts.require('TornadoPool')

// Must match the `levels` parameter of the Transaction circuit.
const MERKLE_TREE_HEIGHT = 5
const MerkleTree = require('../lib/merkleTree')
const { bigInt } = require('snarkjs')

// Convert a number or Buffer into a fixed-length 0x-prefixed hex string.
const toHex = (number, length = 32) => '0x' + (number instanceof Buffer ? number.toString('hex') : bigInt(number).toString(16)).padStart(length * 2, '0')

module.exports = function(deployer, network, accounts) {
  return deployer.then(async () => {
    // Root of an empty tree of the configured height.
    const tree = new MerkleTree(MERKLE_TREE_HEIGHT)
    const root = await tree.root()
    const verifier = await Verifier.deployed()
    const tornado = await deployer.deploy(TornadoPool, verifier.address, toHex(root))
    console.log('TornadoPool\'s address ', tornado.address)
  })
}

View File

@ -7,13 +7,11 @@
"test": "test" "test": "test"
}, },
"scripts": { "scripts": {
"build:circuit:compile": "npx circom circuits/transaction.circom -o build/circuits/transaction.json && npx snarkjs info -c build/circuits/transaction.json", "build:circuit": "./scripts/buildCircuit.sh transaction",
"build:circuit:setup": "npx snarkjs setup --protocol groth -c build/circuits/transaction.json --pk build/circuits/transaction_proving_key.json --vk build/circuits/transaction_verification_key.json",
"build:circuit:bin": "node node_modules/websnark/tools/buildpkey.js -i build/circuits/transaction_proving_key.json -o build/circuits/transaction_proving_key.bin",
"build:circuit:contract": "npx snarkjs generateverifier -v build/circuits/Verifier.sol --vk build/circuits/transaction_verification_key.json",
"build:circuit": "mkdir -p build/circuits && npm run build:circuit:compile && npm run build:circuit:setup && npm run build:circuit:bin && npm run build:circuit:contract",
"build:contract": "npx hardhat compile", "build:contract": "npx hardhat compile",
"build": "npm run build:circuit && npm run build:contract", "build": "npm run build:circuit && npm run build:contract",
"migrate": "npx hardhat run scripts/deploy.js --network localhost",
"start": "node ./src/index.js"
}, },
"keywords": [], "keywords": [],
"author": "", "author": "",
@ -21,15 +19,19 @@
"dependencies": { "dependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.2", "@nomiclabs/hardhat-ethers": "^2.0.2",
"@nomiclabs/hardhat-waffle": "^2.0.1", "@nomiclabs/hardhat-waffle": "^2.0.1",
"@openzeppelin/contracts": "^2.5.0", "@openzeppelin/contracts": "^3.4.0",
"bignumber.js": "^9.0.0", "bignumber.js": "^9.0.0",
"circom": "0.0.35", "circom": "0.5.42",
"circomlib": "git+https://github.com/tornadocash/circomlib.git#c372f14d324d57339c88451834bf2824e73bbdbc", "circom_runtime": "^0.1.12",
"circomlib": "git+https://github.com/tornadocash/circomlib.git#d20d53411d1bef61f38c99a8b36d5d0cc4836aa1",
"dotenv": "^10.0.0", "dotenv": "^10.0.0",
"ethereum-waffle": "^3.2.0", "ethereum-waffle": "^3.2.0",
"ethers": "^5.0.0", "ethers": "^5.0.0",
"ffiasm": "^0.1.1",
"ffjavascript": "^0.2.35",
"fixed-merkle-tree": "^0.5.0",
"hardhat": "^2.3.0", "hardhat": "^2.3.0",
"snarkjs": "git+https://github.com/tornadocash/snarkjs.git#869181cfaf7526fe8972073d31655493a04326d5", "snarkjs": "^0.3.57",
"websnark": "git+https://github.com/tornadocash/websnark.git#2041cfa5fa0b71cd5cca9022a4eeea4afe28c9f7" "tmp-promise": "^3.0.2"
} }
} }

6
scripts/buildCircuit.sh Executable file
View File

@ -0,0 +1,6 @@
#!/bin/bash -e
# Compile a circom circuit, run a zkutil setup, and emit the Solidity
# verifier. Usage: ./scripts/buildCircuit.sh <circuitName>
# (expects circuits/<circuitName>.circom to exist).

# Fail fast with a usage message when the circuit name is missing.
if [ -z "$1" ]; then
  echo "Usage: $0 <circuitName>" >&2
  exit 1
fi

mkdir -p artifacts/circuits
# "$1" is quoted everywhere so names with spaces or glob chars don't word-split.
npx circom -v -r "artifacts/circuits/$1.r1cs" -w "artifacts/circuits/$1.wasm" -s "artifacts/circuits/$1.sym" "circuits/$1.circom"
zkutil setup -c "artifacts/circuits/$1.r1cs" -p "artifacts/circuits/$1.params"
zkutil generate-verifier -p "artifacts/circuits/$1.params" -v artifacts/circuits/Verifier.sol
npx snarkjs info -r "artifacts/circuits/$1.r1cs"

35
scripts/deploy.js Normal file
View File

@ -0,0 +1,35 @@
/*
 * Hardhat deployment script: deploys the Verifier contract, then a
 * TornadoPool initialised with the root of an empty Poseidon Merkle tree.
 */
const { ethers } = require('hardhat')
const MerkleTree = require('fixed-merkle-tree')
const { poseidon } = require('circomlib')

// Must match the `levels` parameter of the Transaction circuit.
const MERKLE_TREE_HEIGHT = 5

// Poseidon hash helpers returning ethers BigNumbers.
function poseidonHash(items) {
  return ethers.BigNumber.from(poseidon(items).toString())
}
function poseidonHash2(a, b) {
  return poseidonHash([a, b])
}

// Left-pad a number or Buffer into a fixed-length 0x-prefixed hex string.
function toFixedHex(number, length = 32) {
  const hex = number instanceof Buffer
    ? number.toString('hex')
    : ethers.BigNumber.from(number).toHexString().slice(2)
  return '0x' + hex.padStart(length * 2, '0')
}

async function main() {
  const Verifier = await ethers.getContractFactory('Verifier')
  const verifier = await Verifier.deploy()
  await verifier.deployed()
  console.log(`verifier: ${verifier.address}`)

  const tree = new MerkleTree(MERKLE_TREE_HEIGHT, [], { hashFunction: poseidonHash2 })
  const root = await tree.root()
  const Pool = await ethers.getContractFactory('TornadoPool')
  const tornado = await Pool.deploy(verifier.address, toFixedHex(root))
  console.log("TornadoPool's address ", tornado.address)
}

main()
  .then(() => process.exit(0))
  .catch((err) => {
    console.error(err)
    process.exit(1)
  })

View File

@ -1,56 +1,38 @@
/* eslint-disable no-console */ /* eslint-disable no-console */
const MerkleTree = require('../lib/merkleTree') const MerkleTree = require('fixed-merkle-tree')
const fs = require('fs') const fs = require('fs')
const { bigInt, stringifyBigInts } = require('snarkjs')
const crypto = require('crypto') const crypto = require('crypto')
const Hasher = require('../lib/mimc') const { poseidon } = require('circomlib')
const Web3 = require('web3') const Web3 = require('web3')
const buildGroth16 = require('websnark/src/groth16') const { ethers } = require('hardhat')
const websnarkUtils = require('websnark/src/utils') const { BigNumber } = ethers
const { randomBN, bitsToNumber, toFixedHex, toBuffer, poseidonHash, poseidonHash2 } = require('./utils')
let contract, web3, circuit, proving_key, groth16
const hasher = new Hasher()
// console.log(hasher.hashArray(['21663839004416932945382355908790599225266501822907911457504978515578255421292', '21663839004416932945382355908790599225266501822907911457504978515578255421292']))
let contract, web3
const { prove } = require('./prover')
const FIELD_SIZE = '21888242871839275222246405745257275088548364400416034343698204186575808495617'
const MERKLE_TREE_HEIGHT = 5 const MERKLE_TREE_HEIGHT = 5
const RPC_URL = 'http://localhost:8545' const RPC_URL = 'http://localhost:8545'
const FIELD_SIZE = '21888242871839275222246405745257275088548364400416034343698204186575808495617'
/** Generate random number of specified byte length */
const rbigint = (nbytes = 31) => bigInt.leBuff2int(crypto.randomBytes(nbytes))
/** BigNumber to hex string of specified length */
const toHex = (number, length = 32) => '0x' + (number instanceof Buffer ? number.toString('hex') : bigInt(number).toString(16)).padStart(length * 2, '0')
function merklePathIndicesToBigint(indexArray) {
let result = 0
for(let item of indexArray.slice().reverse()) {
result = (result << 1) + item
}
return result
}
function fromPrivkey(privkey) { function fromPrivkey(privkey) {
return { return {
privkey, privkey,
pubkey: hasher.hashArray([privkey]), pubkey: poseidonHash([privkey]),
} }
} }
function randomKeypair() { function randomKeypair() {
return fromPrivkey(rbigint()) return fromPrivkey(randomBN())
} }
function createZeroUtxo(keypair) { function createZeroUtxo(keypair) {
return createUtxo( return createUtxo(
0, 0,
rbigint(), randomBN(),
keypair.pubkey, keypair.pubkey,
keypair.privkey, keypair.privkey,
Array(MERKLE_TREE_HEIGHT).fill(0), Array(MERKLE_TREE_HEIGHT).fill(0),
Array(MERKLE_TREE_HEIGHT).fill(0) Array(MERKLE_TREE_HEIGHT).fill(0),
) )
} }
@ -58,7 +40,7 @@ function createOutput(amount, pubkey) {
if (!pubkey) { if (!pubkey) {
throw new Error('no pubkey') throw new Error('no pubkey')
} }
return createUtxo(amount, rbigint(), pubkey) return createUtxo(amount, randomBN(), pubkey)
} }
function createInput({ amount, blinding, pubkey, privkey, merklePathIndices, merklePathElements }) { function createInput({ amount, blinding, pubkey, privkey, merklePathIndices, merklePathElements }) {
@ -68,9 +50,9 @@ function createInput({ amount, blinding, pubkey, privkey, merklePathIndices, mer
/// unsafe function without sanity checks /// unsafe function without sanity checks
function createUtxo(amount, blinding, pubkey, privkey, merklePathIndices, merklePathElements) { function createUtxo(amount, blinding, pubkey, privkey, merklePathIndices, merklePathElements) {
let utxo = { amount, blinding, pubkey, privkey, merklePathIndices, merklePathElements } let utxo = { amount, blinding, pubkey, privkey, merklePathIndices, merklePathElements }
utxo.commitment = hasher.hashArray([amount, blinding, pubkey]) utxo.commitment = poseidonHash([amount, blinding, pubkey])
if (privkey) { if (privkey) {
utxo.nullifier = hasher.hashArray([utxo.commitment, merklePathIndicesToBigint(merklePathIndices), privkey]) utxo.nullifier = poseidonHash([utxo.commitment, bitsToNumber(merklePathIndices), privkey])
} }
return utxo return utxo
} }
@ -91,16 +73,16 @@ async function buildMerkleTree() {
const events = await contract.getPastEvents('NewCommitment', { fromBlock: 0, toBlock: 'latest' }) const events = await contract.getPastEvents('NewCommitment', { fromBlock: 0, toBlock: 'latest' })
const leaves = events const leaves = events
.sort((a, b) => a.returnValues.index - b.returnValues.index) // todo sort by event date .sort((a, b) => a.returnValues.index - b.returnValues.index) // todo sort by event date
.map(e => toHex(e.returnValues.commitment)) .map((e) => toFixedHex(e.returnValues.commitment))
console.log('leaves', leaves) console.log('leaves', leaves)
return new MerkleTree(MERKLE_TREE_HEIGHT, leaves) return new MerkleTree(MERKLE_TREE_HEIGHT, leaves, { hashFunction: poseidonHash2 })
} }
async function insertOutput(tree, output) { async function insertOutput(tree, output) {
await tree.insert(output.commitment) await tree.insert(output.commitment)
let { path_elements, path_index } = await tree.path(tree.totalElements - 1) let { pathElements, pathIndices } = await tree.path(tree.elements().length - 1)
output.merklePathIndices = path_index output.merklePathIndices = pathIndices
output.merklePathElements = path_elements output.merklePathElements = pathElements
} }
async function deposit() { async function deposit() {
@ -129,37 +111,42 @@ async function deposit() {
// data for 2 transaction inputs // data for 2 transaction inputs
inAmount: [tx.inputs[0].amount, tx.inputs[1].amount], inAmount: [tx.inputs[0].amount, tx.inputs[1].amount],
inBlinding: [tx.inputs[0].blinding, tx.inputs[1].blinding], inBlinding: [tx.inputs[0].blinding, tx.inputs[1].blinding],
inPathIndices: [merklePathIndicesToBigint(tx.inputs[0].merklePathIndices), merklePathIndicesToBigint(tx.inputs[1].merklePathIndices)], inPathIndices: [
bitsToNumber(tx.inputs[0].merklePathIndices),
bitsToNumber(tx.inputs[1].merklePathIndices),
],
inPathElements: [tx.inputs[0].merklePathElements, tx.inputs[1].merklePathElements], inPathElements: [tx.inputs[0].merklePathElements, tx.inputs[1].merklePathElements],
// data for 2 transaction outputs // data for 2 transaction outputs
outAmount: [tx.outputs[0].amount, tx.outputs[1].amount], outAmount: [tx.outputs[0].amount, tx.outputs[1].amount],
outBlinding: [tx.outputs[0].blinding, tx.outputs[1].blinding], outBlinding: [tx.outputs[0].blinding, tx.outputs[1].blinding],
outPubkey: [tx.outputs[0].pubkey, tx.outputs[1].pubkey], outPubkey: [tx.outputs[0].pubkey, tx.outputs[1].pubkey],
outPathIndices: merklePathIndicesToBigint(tx.outputs[0].merklePathIndices.slice(1)), outPathIndices: bitsToNumber(tx.outputs[0].merklePathIndices.slice(1)),
outPathElements: tx.outputs[0].merklePathElements.slice(1) outPathElements: tx.outputs[0].merklePathElements.slice(1),
} }
// console.log('input', JSON.stringify(stringifyBigInts(input))) // console.log('input', JSON.stringify(stringifyBigInts(input)))
console.log('DEPOSIT input', input) console.log('DEPOSIT input', input)
console.log('Generating SNARK proof...') console.log('Generating SNARK proof...')
const proofData = await websnarkUtils.genWitnessAndProve(groth16, input, circuit, proving_key)
const { proof } = websnarkUtils.toSolidityInput(proofData) const proof = await prove(input, './artifacts/circuits/transaction')
const args = [ const args = [
toHex(input.root), toFixedHex(input.root),
toHex(input.newRoot), toFixedHex(input.newRoot),
[toHex(tx.inputs[0].nullifier), toHex(tx.inputs[1].nullifier)], [toFixedHex(tx.inputs[0].nullifier), toFixedHex(tx.inputs[1].nullifier)],
[toHex(tx.outputs[0].commitment), toHex(tx.outputs[1].commitment)], [toFixedHex(tx.outputs[0].commitment), toFixedHex(tx.outputs[1].commitment)],
toHex(amount), toFixedHex(amount),
toHex(input.fee), toFixedHex(input.fee),
toHex(input.recipient, 20), toFixedHex(input.recipient, 20),
toHex(input.relayer, 20), toFixedHex(input.relayer, 20),
] ]
console.log('Sending deposit transaction...') console.log('Sending deposit transaction...')
const receipt = await contract.methods.transaction(proof, ...args).send({ value: amount, from: web3.eth.defaultAccount, gas: 1e6 }) const receipt = await contract.methods
.transaction(proof, ...args)
.send({ value: amount, from: web3.eth.defaultAccount, gas: 1e6 })
console.log(`Receipt ${receipt.transactionHash}`) console.log(`Receipt ${receipt.transactionHash}`)
return tx.outputs[0] return tx.outputs[0]
} }
@ -167,18 +154,22 @@ async function deposit() {
async function transact(txOutput) { async function transact(txOutput) {
console.log('txOutput', txOutput) console.log('txOutput', txOutput)
const tree = await buildMerkleTree() const tree = await buildMerkleTree()
console.log('tree', tree)
const oldRoot = await tree.root() const oldRoot = await tree.root()
const keypair = randomKeypair() const keypair = randomKeypair()
const index = await tree.getIndexByElement(toHex(txOutput.commitment)) const index = await tree.indexOf(toFixedHex(txOutput.commitment))
console.log('index', index) console.log('index', index)
const { path_elements, path_index } = await tree.path(index) const { pathElements, pathIndices } = await tree.path(index)
console.log('path_index', path_index) console.log('pathIndices', pathIndices)
txOutput.merklePathElements = path_elements txOutput.merklePathElements = pathElements
const input1 = createInput(txOutput) const input1 = createInput(txOutput)
const tx = { const tx = {
inputs: [input1, createZeroUtxo(fromPrivkey(txOutput.privkey))], inputs: [input1, createZeroUtxo(fromPrivkey(txOutput.privkey))],
outputs: [createOutput(txOutput.amount / 4, keypair.pubkey), createOutput(txOutput.amount * 3 / 4, txOutput.pubkey)], // todo shuffle outputs: [
createOutput(txOutput.amount / 4, keypair.pubkey),
createOutput((txOutput.amount * 3) / 4, txOutput.pubkey),
], // todo shuffle
} }
tx.outputs[0].privkey = keypair.privkey tx.outputs[0].privkey = keypair.privkey
tx.outputs[1].privkey = txOutput.privkey tx.outputs[1].privkey = txOutput.privkey
@ -202,36 +193,40 @@ async function transact(txOutput) {
// data for 2 transaction inputs // data for 2 transaction inputs
inAmount: [tx.inputs[0].amount, tx.inputs[1].amount], inAmount: [tx.inputs[0].amount, tx.inputs[1].amount],
inBlinding: [tx.inputs[0].blinding, tx.inputs[1].blinding], inBlinding: [tx.inputs[0].blinding, tx.inputs[1].blinding],
inPathIndices: [merklePathIndicesToBigint(tx.inputs[0].merklePathIndices), merklePathIndicesToBigint(tx.inputs[1].merklePathIndices)], inPathIndices: [
bitsToNumber(tx.inputs[0].merklePathIndices),
bitsToNumber(tx.inputs[1].merklePathIndices),
],
inPathElements: [tx.inputs[0].merklePathElements, tx.inputs[1].merklePathElements], inPathElements: [tx.inputs[0].merklePathElements, tx.inputs[1].merklePathElements],
// data for 2 transaction outputs // data for 2 transaction outputs
outAmount: [tx.outputs[0].amount, tx.outputs[1].amount], outAmount: [tx.outputs[0].amount, tx.outputs[1].amount],
outBlinding: [tx.outputs[0].blinding, tx.outputs[1].blinding], outBlinding: [tx.outputs[0].blinding, tx.outputs[1].blinding],
outPubkey: [tx.outputs[0].pubkey, tx.outputs[1].pubkey], outPubkey: [tx.outputs[0].pubkey, tx.outputs[1].pubkey],
outPathIndices: merklePathIndicesToBigint(tx.outputs[0].merklePathIndices.slice(1)), outPathIndices: bitsToNumber(tx.outputs[0].merklePathIndices.slice(1)),
outPathElements: tx.outputs[0].merklePathElements.slice(1) outPathElements: tx.outputs[0].merklePathElements.slice(1),
} }
console.log('TRANSFER input', input) console.log('TRANSFER input', input)
console.log('Generating SNARK proof...') console.log('Generating SNARK proof...')
const proofData = await websnarkUtils.genWitnessAndProve(groth16, input, circuit, proving_key) const proof = await prove(input, './artifacts/circuits/transaction')
const { proof } = websnarkUtils.toSolidityInput(proofData)
const args = [ const args = [
toHex(input.root), toFixedHex(input.root),
toHex(input.newRoot), toFixedHex(input.newRoot),
[toHex(tx.inputs[0].nullifier), toHex(tx.inputs[1].nullifier)], [toFixedHex(tx.inputs[0].nullifier), toFixedHex(tx.inputs[1].nullifier)],
[toHex(tx.outputs[0].commitment), toHex(tx.outputs[1].commitment)], [toFixedHex(tx.outputs[0].commitment), toFixedHex(tx.outputs[1].commitment)],
toHex(0), toFixedHex(0),
toHex(input.fee), toFixedHex(input.fee),
toHex(input.recipient, 20), toFixedHex(input.recipient, 20),
toHex(input.relayer, 20), toFixedHex(input.relayer, 20),
] ]
console.log('Sending transfer transaction...') console.log('Sending transfer transaction...')
const receipt = await contract.methods.transaction(proof, ...args).send({ from: web3.eth.defaultAccount, gas: 1e6 }) const receipt = await contract.methods
.transaction(proof, ...args)
.send({ from: web3.eth.defaultAccount, gas: 1e6 })
console.log(`Receipt ${receipt.transactionHash}`) console.log(`Receipt ${receipt.transactionHash}`)
return tx.outputs[0] return tx.outputs[0]
} }
@ -241,11 +236,11 @@ async function withdraw(txOutput) {
const tree = await buildMerkleTree() const tree = await buildMerkleTree()
const oldRoot = await tree.root() const oldRoot = await tree.root()
const index = await tree.getIndexByElement(toHex(txOutput.commitment)) const index = await tree.indexOf(toFixedHex(txOutput.commitment))
console.log('index', index) console.log('index', index)
const { path_elements, path_index } = await tree.path(index) const { pathElements, pathIndices } = await tree.path(index)
console.log('path_index', path_index) console.log('pathIndices', pathIndices)
txOutput.merklePathElements = path_elements txOutput.merklePathElements = pathElements
const input1 = createInput(txOutput) const input1 = createInput(txOutput)
const fakeKeypair = randomKeypair() const fakeKeypair = randomKeypair()
const tx = { const tx = {
@ -260,7 +255,7 @@ async function withdraw(txOutput) {
newRoot: await tree.root(), newRoot: await tree.root(),
inputNullifier: [tx.inputs[0].nullifier, tx.inputs[1].nullifier], inputNullifier: [tx.inputs[0].nullifier, tx.inputs[1].nullifier],
outputCommitment: [tx.outputs[0].commitment, tx.outputs[1].commitment], outputCommitment: [tx.outputs[0].commitment, tx.outputs[1].commitment],
extAmount: bigInt(FIELD_SIZE).sub(bigInt(txOutput.amount)), extAmount: BigNumber.from(FIELD_SIZE).sub(BigNumber.from(txOutput.amount)),
fee: 0, fee: 0,
recipient: '0xc2Ba33d4c0d2A92fb4f1a07C273c5d21E688Eb48', recipient: '0xc2Ba33d4c0d2A92fb4f1a07C273c5d21E688Eb48',
relayer: 0, relayer: 0,
@ -271,38 +266,42 @@ async function withdraw(txOutput) {
// data for 2 transaction inputs // data for 2 transaction inputs
inAmount: [tx.inputs[0].amount, tx.inputs[1].amount], inAmount: [tx.inputs[0].amount, tx.inputs[1].amount],
inBlinding: [tx.inputs[0].blinding, tx.inputs[1].blinding], inBlinding: [tx.inputs[0].blinding, tx.inputs[1].blinding],
inPathIndices: [merklePathIndicesToBigint(tx.inputs[0].merklePathIndices), merklePathIndicesToBigint(tx.inputs[1].merklePathIndices)], inPathIndices: [
bitsToNumber(tx.inputs[0].merklePathIndices),
bitsToNumber(tx.inputs[1].merklePathIndices),
],
inPathElements: [tx.inputs[0].merklePathElements, tx.inputs[1].merklePathElements], inPathElements: [tx.inputs[0].merklePathElements, tx.inputs[1].merklePathElements],
// data for 2 transaction outputs // data for 2 transaction outputs
outAmount: [tx.outputs[0].amount, tx.outputs[1].amount], outAmount: [tx.outputs[0].amount, tx.outputs[1].amount],
outBlinding: [tx.outputs[0].blinding, tx.outputs[1].blinding], outBlinding: [tx.outputs[0].blinding, tx.outputs[1].blinding],
outPubkey: [tx.outputs[0].pubkey, tx.outputs[1].pubkey], outPubkey: [tx.outputs[0].pubkey, tx.outputs[1].pubkey],
outPathIndices: merklePathIndicesToBigint(tx.outputs[0].merklePathIndices.slice(1)), outPathIndices: bitsToNumber(tx.outputs[0].merklePathIndices.slice(1)),
outPathElements: tx.outputs[0].merklePathElements.slice(1) outPathElements: tx.outputs[0].merklePathElements.slice(1),
} }
console.log('WITHDRAW input', input) console.log('WITHDRAW input', input)
console.log('Generating SNARK proof...') console.log('Generating SNARK proof...')
const proofData = await websnarkUtils.genWitnessAndProve(groth16, input, circuit, proving_key) const proof = await prove(input, './artifacts/circuits/transaction')
const { proof } = websnarkUtils.toSolidityInput(proofData)
const args = [ const args = [
toHex(input.root), toFixedHex(input.root),
toHex(input.newRoot), toFixedHex(input.newRoot),
[toHex(tx.inputs[0].nullifier), toHex(tx.inputs[1].nullifier)], [toFixedHex(tx.inputs[0].nullifier), toFixedHex(tx.inputs[1].nullifier)],
[toHex(tx.outputs[0].commitment), toHex(tx.outputs[1].commitment)], [toFixedHex(tx.outputs[0].commitment), toFixedHex(tx.outputs[1].commitment)],
toHex(input.extAmount), toFixedHex(input.extAmount),
toHex(input.fee), toFixedHex(input.fee),
toHex(input.recipient, 20), toFixedHex(input.recipient, 20),
toHex(input.relayer, 20), toFixedHex(input.relayer, 20),
] ]
console.log('args', args) console.log('args', args)
console.log('Sending withdraw transaction...') console.log('Sending withdraw transaction...')
const receipt = await contract.methods.transaction(proof, ...args).send({ from: web3.eth.defaultAccount, gas: 1e6 }) const receipt = await contract.methods
.transaction(proof, ...args)
.send({ from: web3.eth.defaultAccount, gas: 1e6 })
console.log(`Receipt ${receipt.transactionHash}`) console.log(`Receipt ${receipt.transactionHash}`)
let bal = await web3.eth.getBalance('0xc2Ba33d4c0d2A92fb4f1a07C273c5d21E688Eb48') let bal = await web3.eth.getBalance('0xc2Ba33d4c0d2A92fb4f1a07C273c5d21E688Eb48')
@ -310,19 +309,17 @@ async function withdraw(txOutput) {
} }
async function main() { async function main() {
web3 = new Web3(new Web3.providers.HttpProvider(RPC_URL, { timeout: 5 * 60 * 1000 }), null, { transactionConfirmationBlocks: 1 }) web3 = new Web3(new Web3.providers.HttpProvider(RPC_URL, { timeout: 5 * 60 * 1000 }), null, {
circuit = require('../build/circuits/transaction.json') transactionConfirmationBlocks: 1,
proving_key = fs.readFileSync('../build/circuits/transaction_proving_key.bin').buffer })
groth16 = await buildGroth16()
netId = await web3.eth.net.getId() netId = await web3.eth.net.getId()
const contractData = require('../build/contracts/TornadoPool.json') const contractData = require('../artifacts/contracts/TornadoPool.sol/TornadoPool.json')
contract = new web3.eth.Contract(contractData.abi, contractData.networks[netId].address) contract = new web3.eth.Contract(contractData.abi, '0xCf7Ed3AccA5a467e9e704C703E8D87F634fB0Fc9')
web3.eth.defaultAccount = (await web3.eth.getAccounts())[0] web3.eth.defaultAccount = (await web3.eth.getAccounts())[0]
const txOutput = await deposit() const txOutput = await deposit()
const txOutput1 = await transact(txOutput) const txOutput1 = await transact(txOutput)
await withdraw(txOutput1) await withdraw(txOutput1)
} }
main() main()

41
src/prover.js Normal file
View File

@ -0,0 +1,41 @@
const { wtns } = require('snarkjs')
const { utils } = require('ffjavascript')
const fs = require('fs')
const tmp = require('tmp-promise')
const util = require('util')
const exec = util.promisify(require('child_process').exec)
// NOTE(review): empty and never called anywhere in this file — looks like leftover scaffolding; consider removing
function stringify() {}
// Generate a zkSNARK proof for `input` using the circuit artifacts at
// `keyBasePath` (expects sibling files `.wasm`, `.sym`, `.r1cs`, `.params`).
// Witness generation runs in-process via snarkjs; the proof itself is produced
// by shelling out to the external `zkutil` binary, which must be on PATH.
// Returns a promise resolving to the proof as a '0x'-prefixed hex string.
function prove(input, keyBasePath) {
  // Stringified form is used for logging; wtns.debug receives the bigint form back below.
  input = utils.stringifyBigInts(input)
  console.log('input', input)
  // Scratch directory for intermediate witness/proof files.
  // NOTE(review): the temp dir is never cleaned up — confirm this is intended.
  return tmp.dir().then(async (dir) => {
    dir = dir.path
    let out
    try {
      // Compute the witness; `debug` variant also uses the .sym file for readable diagnostics.
      await wtns.debug(
        utils.unstringifyBigInts(input),
        `${keyBasePath}.wasm`,
        `${dir}/witness.wtns`,
        `${keyBasePath}.sym`,
        {},
        console,
      )
      // Export the binary witness to JSON so zkutil can consume it.
      const witness = utils.stringifyBigInts(await wtns.exportJson(`${dir}/witness.wtns`))
      fs.writeFileSync(`${dir}/witness.json`, JSON.stringify(witness, null, 2))
      // zkutil reads the witness JSON and writes proof.json / public.json into the temp dir.
      out = await exec(
        `zkutil prove -c ${keyBasePath}.r1cs -p ${keyBasePath}.params -w ${dir}/witness.json -r ${dir}/proof.json -o ${dir}/public.json`,
      )
    } catch (e) {
      // `out` is undefined when the failure happened before or inside exec.
      console.log(out, e)
      throw e
    }
    return '0x' + JSON.parse(fs.readFileSync(`${dir}/proof.json`)).proof
  })
}

module.exports = { prove }

44
src/utils.js Normal file
View File

@ -0,0 +1,44 @@
const crypto = require('crypto')
const ethers = require('ethers')
const BigNumber = ethers.BigNumber
const { poseidon } = require('circomlib')
// Poseidon hash of an array of field elements, returned as an ethers BigNumber.
const poseidonHash = (items) => {
  const hash = poseidon(items)
  return BigNumber.from(hash.toString())
}

// Convenience wrapper: Poseidon hash of exactly two elements.
const poseidonHash2 = (a, b) => poseidonHash([a, b])
/** Generate a cryptographically random BigNumber of `nbytes` bytes (default 31). */
const randomBN = (nbytes = 31) => {
  const bytes = crypto.randomBytes(nbytes)
  return BigNumber.from(bytes)
}
/**
 * Serialize a number or Buffer as a '0x'-prefixed hex string,
 * left-padded with zeros to exactly `length` bytes (default 32).
 */
const toFixedHex = (number, length = 32) => {
  const hex =
    number instanceof Buffer ? number.toString('hex') : BigNumber.from(number).toHexString().slice(2)
  return '0x' + hex.padStart(2 * length, '0')
}
/** Serialize `value` into a big-endian Buffer of exactly `length` bytes. */
const toBuffer = (value, length) => {
  const hex = BigNumber.from(value).toHexString().slice(2)
  return Buffer.from(hex.padStart(2 * length, '0'), 'hex')
}
/**
 * Convert an array of bits into its numeric value.
 * Bits are little-endian: bits[0] is the least significant bit.
 * An empty array yields 0.
 *
 * Uses `result * 2 + bit` instead of `(result << 1) + bit`: JS bitwise
 * operators truncate to 32-bit signed integers, which silently overflows
 * for arrays longer than 31 bits; multiplication is exact up to 2^53.
 */
function bitsToNumber(bits) {
  let result = 0
  // Walk from the most significant bit down (reverse of storage order).
  for (const bit of bits.slice().reverse()) {
    result = result * 2 + bit
  }
  return result
}
// Shared crypto/encoding helpers used by the scripts and tests.
module.exports = {
  randomBN,
  bitsToNumber,
  toFixedHex,
  toBuffer,
  poseidonHash,
  poseidonHash2,
}

View File

@ -1,99 +0,0 @@
/**
* Use this file to configure your truffle project. It's seeded with some
* common settings for different networks and features like migrations,
* compilation and testing. Uncomment the ones you need or modify
* them to suit your project as necessary.
*
* More information about configuration can be found at:
*
* truffleframework.com/docs/advanced/configuration
*
* To deploy via Infura you'll need a wallet provider (like @truffle/hdwallet-provider)
* to sign your transactions before they're sent to a remote public node. Infura accounts
* are available for free at: infura.io/register.
*
* You'll also need a mnemonic - the twelve word phrase the wallet uses to generate
* public/private key pairs. If you're publishing your code to GitHub make sure you load this
* phrase from a file you've .gitignored so it doesn't accidentally become public.
*
*/
// const HDWalletProvider = require('@truffle/hdwallet-provider');
// const infuraKey = "fj4jll3k.....";
//
// const fs = require('fs');
// const mnemonic = fs.readFileSync(".secret").toString().trim();
module.exports = {
/**
* Networks define how you connect to your ethereum client and let you set the
* defaults web3 uses to send transactions. If you don't specify one truffle
* will spin up a development blockchain for you on port 9545 when you
* run `develop` or `test`. You can ask a truffle command to use a specific
* network from the command line, e.g
*
* $ truffle test --network <network-name>
*/
networks: {
// Useful for testing. The `development` name is special - truffle uses it by default
// if it's defined here and no other network is specified at the command line.
// You should run a client (like ganache-cli, geth or parity) in a separate terminal
// tab if you use this network and you must also set the `host`, `port` and `network_id`
// options below to some value.
//
development: {
host: "127.0.0.1", // Localhost (default: none)
port: 8545, // Standard Ethereum port (default: none)
network_id: "*", // Any network (default: none)
},
// Another network with more advanced options...
// advanced: {
// port: 8777, // Custom port
// network_id: 1342, // Custom network
// gas: 8500000, // Gas sent with each transaction (default: ~6700000)
// gasPrice: 20000000000, // 20 gwei (in wei) (default: 100 gwei)
// from: <address>, // Account to send txs from (default: accounts[0])
// websockets: true // Enable EventEmitter interface for web3 (default: false)
// },
// Useful for deploying to a public network.
// NB: It's important to wrap the provider as a function.
// ropsten: {
// provider: () => new HDWalletProvider(mnemonic, `https://ropsten.infura.io/v3/YOUR-PROJECT-ID`),
// network_id: 3, // Ropsten's id
// gas: 5500000, // Ropsten has a lower block limit than mainnet
// confirmations: 2, // # of confs to wait between deployments. (default: 0)
// timeoutBlocks: 200, // # of blocks before a deployment times out (minimum/default: 50)
// skipDryRun: true // Skip dry run before migrations? (default: false for public nets )
// },
// Useful for private networks
// private: {
// provider: () => new HDWalletProvider(mnemonic, `https://network.io`),
// network_id: 2111, // This network is yours, in the cloud.
// production: true // Treats this network as if it was a public net. (default: false)
// }
},
// Set default mocha options here, use special reporters etc.
mocha: {
// timeout: 100000
},
// Configure your compilers
compilers: {
solc: {
version: "0.5.17", // Fetch exact version from solc-bin (default: truffle's version)
// docker: true, // Use "0.5.1" you've installed locally with docker (default: false)
settings: { // See the solidity docs for advice about optimization and evmVersion
optimizer: {
enabled: true,
runs: 200
},
//evmVersion: "byzantium"
}
}
}
}

776
yarn.lock

File diff suppressed because it is too large Load Diff