Load events in chunks; add Poseidon hash computation

This commit is contained in:
Alexey 2021-04-01 21:12:34 +03:00
parent c3a0b0f0fd
commit f7832f6ed3
2 changed files with 45 additions and 16 deletions

View File

@ -9,7 +9,10 @@ const { BigNumber } = ethers
const config = {
rpcUrl: process.env.RPC_URL,
multicallAddress: '0xeefba1e63905ef1d7acba5a8513c70307c1ce441',
multicallAddress:
process.env.NET_ID === '1'
? '0xeefba1e63905ef1d7acba5a8513c70307c1ce441'
: '0x77dca2c955b15e9de4dbbcf1246b4b85b651e50e',
}
const abi = new ethers.utils.AbiCoder()
@ -261,8 +264,8 @@ async function getCommittedWithdrawals() {
fs.writeFileSync('./cache/committedWithdrawals.json', JSON.stringify(events, null, 2))
}
// getCommittedDeposits()
// getCommittedWithdrawals()
getCommittedDeposits()
getCommittedWithdrawals()
// getPendingDeposits()
getPendingDeposits()
getPendingWithdrawals()

View File

@ -2,16 +2,31 @@ const { getTornadoTrees, getProvider } = require('./singletons')
const { action } = require('./utils')
const ethers = require('ethers')
const abi = new ethers.utils.AbiCoder()
// const fs = require('fs')
const fs = require('fs')
const { poseidonHash, toFixedHex } = require('./utils')
async function getTornadoTreesEvents(type, fromBlock, toBlock) {
let events = []
const NUMBER_PARTS = 50
const part = Math.floor((toBlock - fromBlock) / NUMBER_PARTS)
const eventName = type === action.DEPOSIT ? 'DepositData' : 'WithdrawalData'
const events = await getProvider().getLogs({
address: getTornadoTrees().address,
topics: getTornadoTrees().filters[eventName]().topics,
fromBlock,
toBlock,
})
fromBlock = Number(fromBlock)
toBlock = Number(fromBlock) + part
for (let i = 0; i <= NUMBER_PARTS; i++) {
const newEvents = await getProvider().getLogs({
address: getTornadoTrees().address,
topics: getTornadoTrees().filters[eventName]().topics,
fromBlock,
toBlock,
})
events = events.concat(newEvents)
fromBlock = toBlock
toBlock += part
}
return events
.map((e) => {
const { instance, hash, block, index } = getTornadoTrees().interface.parseLog(e).args
@ -37,16 +52,27 @@ async function getMigrationEvents(type) {
const committedFile = type === action.DEPOSIT ? 'committedDeposits' : 'committedWithdrawals'
const committedEvents = require(`../cache/${committedFile}.json`)
const newTreeEvents = await getTornadoTreesEvents(type, 0, 'latest')
const latestBlock = await getProvider().getBlock()
const fromBlock = process.env.NET_ID === 1 ? 12143762 : 4446831
const newTreeEvents = await getTornadoTreesEvents(type, fromBlock, latestBlock.number)
let allEvents = committedEvents.concat(pendingEvents)
const filter = new Set(allEvents.map((a) => a.sha3))
allEvents = allEvents.concat(newTreeEvents.filter((a) => !filter.has(a.sha3)))
allEvents = allEvents.map((e) => ({
...e,
poseidon: toFixedHex(poseidonHash([e.instance, e.hash, e.block])),
}))
console.log('allEvents.slice(-1)', allEvents.slice(-1))
const trees = await getTornadoTrees()
const lastHash = allEvents.slice(-1)[0].hash
const eventType = type === action.DEPOSIT ? 'DepositData' : 'WithdrawalData'
const eventFilter = trees.filters[eventType](null, lastHash)
const event = await trees.queryFilter(eventFilter)
console.log('event', event[0].blockNumber)
// it can be useful to get all necessary events for claiming AP
// fs.writeFileSync(
// `../bot_tornado/events_cache/${type}.json`,
// JSON.stringify(allEvents, null, 2),
// )
fs.writeFileSync(`./cache/${type}.json`, JSON.stringify(allEvents, null, 2))
return {
committedEvents: allEvents.slice(0, committedCount.toNumber()),
pendingEvents: allEvents.slice(committedCount.toNumber()),