poma 2021-02-02 14:27:58 +03:00
parent 71efcb28a7
commit 4d6a05a88b
GPG Key ID: BA20CB01FE165657 (no known key found for this signature in database)
10 changed files with 96 additions and 56 deletions

View File

@@ -1,8 +1,6 @@
.vscode
build
artifacts
cache
circuits
scripts
contracts/verifiers/RewardVerifier.sol
contracts/verifiers/WithdrawVerifier.sol
contracts/verifiers/TreeUpdateVerifier.sol
contracts/utils/FloatMath.sol
contracts/verifiers

View File

@@ -3,7 +3,6 @@ pragma solidity ^0.6.0;
import "hardhat/console.sol";
contract Greeter {
string greeting;

View File

@@ -3,7 +3,6 @@
pragma solidity ^0.6.0;
contract Pack {
uint256 public constant CHUNK_TREE_HEIGHT = 7;
uint256 public constant CHUNK_SIZE = 2**CHUNK_TREE_HEIGHT;
uint256 public constant ITEM_SIZE = 32 + 20 + 4;
@@ -17,21 +16,25 @@ contract Pack {
event DepositData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
function pack2(bytes32[CHUNK_SIZE] memory hashes, address[CHUNK_SIZE] memory instances, uint32[CHUNK_SIZE] memory blocks) public {
function pack2(
bytes32[CHUNK_SIZE] memory hashes,
address[CHUNK_SIZE] memory instances,
uint32[CHUNK_SIZE] memory blocks
) public {
uint256 gasBefore = gasleft();
bytes memory data = new bytes(BYTES_SIZE);
for(uint i = 0; i < CHUNK_SIZE; i++) {
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 hash, address instance, uint32 block) = (hashes[i], instances[i], blocks[i]);
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x38), block)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x34), instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x20), hash)
}
}
}
uint256 gasHash = gasleft();
bytes32 hash1 = sha256(data);
uint256 gasEvents = gasleft();
for(uint i = 0; i < CHUNK_SIZE; i++) {
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
emit DepositData(instances[i], hashes[i], blocks[i], i);
}
gas1 = gasEvents - gasleft();
@@ -41,19 +44,31 @@ contract Pack {
hash = hash1;
}
function pack3(bytes32[CHUNK_SIZE] memory hashes, address[CHUNK_SIZE] memory instances, uint32[CHUNK_SIZE] memory blocks) public view returns(uint256 gas1, uint256 gas2, bytes32 hash) {
function pack3(
bytes32[CHUNK_SIZE] memory hashes,
address[CHUNK_SIZE] memory instances,
uint32[CHUNK_SIZE] memory blocks
)
public
view
returns (
uint256 gas1,
uint256 gas2,
bytes32 hash
)
{
uint256 gasBefore = gasleft();
bytes memory data = new bytes(BYTES_SIZE);
for(uint i = 0; i < CHUNK_SIZE; i++) {
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 hash, address instance, uint32 block) = (hashes[i], instances[i], blocks[i]);
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x38), block)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x34), instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x20), hash)
}
}
}
uint256 gasHash = gasleft();
bytes32 hash = sha256(data);
return (gasleft() - gasHash, gasHash - gasBefore, hash);
}
}
}
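A minimal off-chain sketch of the record layout the assembly in pack2/pack3 writes, assuming ethers v5 (which this repo's tests already use). packDeposits is a hypothetical helper for illustration, not part of the contract: each record is ITEM_SIZE = 32 + 20 + 4 bytes (note hash, instance address, block number), and the concatenated byte string is hashed with sha256.

const { utils } = require('ethers')

// Hypothetical helper: build the same 56-byte-per-item byte string as the assembly above
// (bytes32 hash, 20-byte address, uint32 block), then hash the whole buffer with sha256.
function packDeposits(deposits) {
  const data = utils.concat(
    deposits.map((d) => utils.solidityPack(['bytes32', 'address', 'uint32'], [d.hash, d.instance, d.block])),
  )
  return { data: utils.hexlify(data), hash: utils.sha256(data) }
}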

View File

@@ -1,14 +1,14 @@
require("@nomiclabs/hardhat-waffle");
require('@nomiclabs/hardhat-waffle')
// This is a sample Hardhat task. To learn how to create your own go to
// https://hardhat.org/guides/create-task.html
task("accounts", "Prints the list of accounts", async () => {
const accounts = await ethers.getSigners();
task('accounts', 'Prints the list of accounts', async () => {
const accounts = await ethers.getSigners()
for (const account of accounts) {
console.log(account.address);
console.log(account.address)
}
});
})
// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more
@@ -17,6 +17,5 @@ task("accounts", "Prints the list of accounts", async () => {
* @type import('hardhat/config').HardhatUserConfig
*/
module.exports = {
solidity: "0.6.12",
};
solidity: '0.6.12',
}

View File

@@ -1,5 +1,20 @@
{
"name": "hardhat-project",
"repository": "https://github.com/tornadocash/tornado-trees.git",
"author": "Tornadocash team <hello@tornado.cash>",
"license": "MIT",
"scripts": {
"compile": "npx hardhat compile",
"test": "npx hardhat test",
"eslint": "eslint --ext .js --ignore-path .gitignore .",
"prettier:check": "prettier --check . --config .prettierrc",
"prettier:fix": "prettier --write . --config .prettierrc",
"lint": "yarn eslint && yarn prettier:check",
"circuit:batchTreeUpdate": "scripts/buildCircuit.sh BatchTreeUpdate",
"circuit:batchTreeUpdateLarge": "scripts/buildCircuit.sh BatchTreeUpdate large",
"circuit:batchTreeUpdateWitness": "scripts/buildWitness.sh BatchTreeUpdate",
"circuit": "yarn circuit:batchTreeUpdate"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.1",
"@nomiclabs/hardhat-waffle": "^2.0.1",

View File

@@ -1,13 +1,7 @@
const ethers = require('ethers')
const BigNumber = ethers.BigNumber
const {
bitsToNumber,
toFixedHex,
toBuffer,
poseidonHash,
poseidonHash2,
} = require('./utils')
const { bitsToNumber, toFixedHex, toBuffer, poseidonHash, poseidonHash2 } = require('./utils')
const jsSHA = require('jssha')
@@ -29,12 +23,14 @@ function hashInputs(input) {
}
const hash = '0x' + sha.getHash('HEX')
const result = BigNumber.from(hash).mod(BigNumber.from('21888242871839275222246405745257275088548364400416034343698204186575808495617')).toString()
const result = BigNumber.from(hash)
.mod(BigNumber.from('21888242871839275222246405745257275088548364400416034343698204186575808495617'))
.toString()
return result
}
function prove(input, keyBasePath) {
return tmp.dir().then(async dir => {
return tmp.dir().then(async (dir) => {
dir = dir.path
fs.writeFileSync(`${dir}/input.json`, JSON.stringify(input, null, 2))
let out
@@ -47,13 +43,15 @@ function prove(input, keyBasePath) {
out = await exec(`npx snarkjs wd ${keyBasePath}.wasm ${dir}/input.json ${dir}/witness.wtns`)
out = await exec(`npx snarkjs wej ${dir}/witness.wtns ${dir}/witness.json`)
}
out = await exec(`zkutil prove -c ${keyBasePath}.r1cs -p ${keyBasePath}.params -w ${dir}/witness.json -r ${dir}/proof.json -o ${dir}/public.json`)
out = await exec(
`zkutil prove -c ${keyBasePath}.r1cs -p ${keyBasePath}.params -w ${dir}/witness.json -r ${dir}/proof.json -o ${dir}/public.json`,
)
} catch (e) {
console.log(out, e)
throw e
}
return '0x' + JSON.parse(fs.readFileSync(`${dir}/proof.json`)).proof
});
})
}
function batchTreeUpdate(tree, events) {
@@ -67,7 +65,7 @@ function batchTreeUpdate(tree, events) {
tree.bulkInsert(leaves)
const newRoot = tree.root().toString()
let { pathElements, pathIndices } = tree.path(tree.elements().length - 1)
pathElements = pathElements.slice(batchHeight).map(a => BigNumber.from(a).toString())
pathElements = pathElements.slice(batchHeight).map((a) => BigNumber.from(a).toString())
pathIndices = bitsToNumber(pathIndices.slice(batchHeight)).toString()
const input = {
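An aside on the hashInputs() change above: the sha256 digest is reduced modulo what appears to be the BN254 scalar field modulus, presumably so it fits into a single snark public input. Below is a minimal standalone sketch of the same reduction, assuming ethers v5; toFieldElement is a hypothetical name and it uses ethers' sha256 rather than the jssha instance used in this file.

const { BigNumber, utils } = require('ethers')

// Same modulus as the constant in hashInputs() above.
const FIELD_SIZE = BigNumber.from(
  '21888242871839275222246405745257275088548364400416034343698204186575808495617',
)

// Reduce the sha256 digest of some hex-encoded data into the field.
const toFieldElement = (dataHex) => BigNumber.from(utils.sha256(dataHex)).mod(FIELD_SIZE).toString()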

View File

@@ -13,9 +13,19 @@ const randomBN = (nbytes = 31) => BigNumber.from(crypto.randomBytes(nbytes))
/** BigNumber to hex string of specified length */
const toFixedHex = (number, length = 32) =>
'0x' +
(number instanceof Buffer ? number.toString('hex') : BigNumber.from(number).toHexString().slice(2)).padStart(length * 2, '0')
(number instanceof Buffer
? number.toString('hex')
: BigNumber.from(number).toHexString().slice(2)
).padStart(length * 2, '0')
const toBuffer = (value, length) => Buffer.from(BigNumber.from(value).toHexString().slice(2).padStart(length * 2, '0'), 'hex')
const toBuffer = (value, length) =>
Buffer.from(
BigNumber.from(value)
.toHexString()
.slice(2)
.padStart(length * 2, '0'),
'hex',
)
function bitsToNumber(bits) {
let result = 0
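A few hypothetical usage examples for the reformatted helpers above (values chosen only for illustration; the require path mirrors how controller.js imports them):

const { toFixedHex, toBuffer } = require('./utils')

toFixedHex(31337, 4) // '0x00007a69' (zero-padded to 4 bytes)
toFixedHex(Buffer.from('ab', 'hex')) // '0x' followed by 62 zeros and 'ab' (Buffers are hex-encoded directly)
toBuffer('0xdead', 4) // <Buffer 00 00 de ad>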

View File

@@ -1,5 +1,5 @@
/* global artifacts, web3, contract */
const { expect } = require("chai")
const { expect } = require('chai')
const MerkleTree = require('fixed-merkle-tree')
const jsSHA = require('jssha')
const { poseidonHash2 } = require('../src/utils')
@@ -15,7 +15,7 @@ const instances = [
const hashes = [
'0x6f44cd7458bf24f65851fa8097712e3a8d9a6f3e387c501b285338308a74b8f3',
'0xafd3103939b7b0cd7a0ad1ddac57dd13af7f2825a21b47ae995b5bb0f767a106',
'0x57f7b90a3cb4ea6860e6dd5fa44ac4f53ebe6ae3948af577a01ef51738313246'
'0x57f7b90a3cb4ea6860e6dd5fa44ac4f53ebe6ae3948af577a01ef51738313246',
]
const levels = 20
@@ -23,7 +23,7 @@ const CHUNK_TREE_HEIGHT = 7
describe.skip('Pack', () => {
it('should work', async () => {
const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const Pack = await ethers.getContractFactory("Pack")
const Pack = await ethers.getContractFactory('Pack')
const pack = await Pack.deploy()
const notes = []
@@ -34,7 +34,11 @@ describe.skip('Pack', () => {
block: 1 + i,
}
}
const receipt = await pack.pack2(notes.map(a => a.hash), notes.map(a => a.instance), notes.map(a => a.block))
const receipt = await pack.pack2(
notes.map((a) => a.hash),
notes.map((a) => a.instance),
notes.map((a) => a.block),
)
const receipt2 = await receipt.wait()
console.log(`total ${receipt2.gasUsed}`)

View File

@@ -1,5 +1,5 @@
/* global artifacts, web3, contract */
const { expect } = require("chai")
const { expect } = require('chai')
const MerkleTree = require('fixed-merkle-tree')
const { poseidonHash2, randomBN } = require('../src/utils')
const { batchTreeUpdate, prove } = require('../src/controller')

View File

@@ -1,16 +1,18 @@
const { expect } = require("chai")
const { expect } = require('chai')
const { toFixedHex, poseidonHash2, randomBN } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')
const controller = require('../src/controller')
async function register(note, tornadoTrees, from) {
await tornadoTrees.connect(from).register(
note.instance,
toFixedHex(note.commitment),
toFixedHex(note.nullifierHash),
note.depositBlock,
note.withdrawalBlock,
)
await tornadoTrees
.connect(from)
.register(
note.instance,
toFixedHex(note.commitment),
toFixedHex(note.nullifierHash),
note.depositBlock,
note.withdrawalBlock,
)
}
const toEns = (addr) => toFixedHex(addr, 20).padEnd(66, '0')
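For illustration, toEns pads a 20-byte address into a left-aligned bytes32; the address below is hypothetical:

toEns('0x1111111111111111111111111111111111111111')
// => '0x1111111111111111111111111111111111111111000000000000000000000000'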
@@ -27,7 +29,7 @@ const instances = [
const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']
describe("TornadoTrees", function() {
describe('TornadoTrees', function () {
let tree
let operator
let tornadoProxy
@@ -36,14 +38,14 @@ describe("TornadoTrees", function() {
let notes
let events
beforeEach(async function() {
beforeEach(async function () {
tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
;[operator, tornadoProxy] = await ethers.getSigners()
const BatchTreeUpdateVerifier = await ethers.getContractFactory("BatchTreeUpdateVerifier")
const BatchTreeUpdateVerifier = await ethers.getContractFactory('BatchTreeUpdateVerifier')
verifier = await BatchTreeUpdateVerifier.deploy()
const TornadoTrees = await ethers.getContractFactory("TornadoTreesMock")
const TornadoTrees = await ethers.getContractFactory('TornadoTreesMock')
tornadoTrees = await TornadoTrees.deploy(
toEns(operator.address),
toEns(tornadoProxy.address),
@@ -71,7 +73,7 @@ describe("TornadoTrees", function() {
}))
})
it("Should calculate hash", async function() {
it('Should calculate hash', async function () {
const data = await controller.batchTreeUpdate(tree, events)
const solHash = await tornadoTrees.updateDepositTreeMock(
toFixedHex(data.oldRoot),
@@ -82,7 +84,7 @@ describe("TornadoTrees", function() {
expect(solHash).to.be.equal(data.argsHash)
})
it("Should calculate hash", async function() {
it('Should calculate hash', async function () {
const data = await controller.batchTreeUpdate(tree, events)
const proof = await controller.prove(data, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(