This commit is contained in:
smart_ex 2022-05-18 18:20:06 +10:00
parent 8bc5b7be9e
commit ae61e3ec96
13 changed files with 471 additions and 1929 deletions

View File

@@ -6,8 +6,7 @@
},
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:security/recommended"
"plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {

View File

@@ -48,7 +48,6 @@
"eslint": "^8.14.0",
"eslint-config-prettier": "^6.12.0",
"eslint-plugin-prettier": "^3.1.4",
"eslint-plugin-security": "^1.5.0",
"mocha": "^8.1.3",
"nodemon": "^2.0.15",
"ts-node": "^10.7.0",

View File

@@ -1,57 +0,0 @@
import { postJob } from './queue';
import { jobType } from './types';
const {
getTornadoWithdrawInputError,
getMiningRewardInputError,
getMiningWithdrawInputError,
} = require('./validator');
async function tornadoWithdraw(req, res) {
const inputError = getTornadoWithdrawInputError(req.body);
if (inputError) {
console.log('Invalid input:', inputError);
return res.status(400).json({ error: inputError });
}
const id = await postJob({
type: jobType.TORNADO_WITHDRAW,
request: req.body,
});
return res.json({ id });
}
async function miningReward(req, res) {
const inputError = getMiningRewardInputError(req.body);
if (inputError) {
console.log('Invalid input:', inputError);
return res.status(400).json({ error: inputError });
}
const id = await postJob({
type: jobType.MINING_REWARD,
request: req.body,
});
return res.json({ id });
}
async function miningWithdraw(req, res) {
const inputError = getMiningWithdrawInputError(req.body);
if (inputError) {
console.log('Invalid input:', inputError);
return res.status(400).json({ error: inputError });
}
const id = await postJob({
type: jobType.MINING_WITHDRAW,
request: req.body,
});
return res.json({ id });
}
module.exports = {
tornadoWithdraw,
miningReward,
miningWithdraw,
};

View File

@@ -1,27 +0,0 @@
const Web3 = require('web3');
const Redis = require('ioredis');
const { toBN, fromWei } = require('web3-utils');
const { setSafeInterval } = require('./utils');
const { redisUrl, httpRpcUrl, privateKey, minimumBalance } = require('./config');
const web3 = new Web3(httpRpcUrl);
const redis = new Redis(redisUrl);
/**
 * Periodic relayer health probe: marks the relayer unhealthy in the redis
 * 'health' hash whenever the signing account's balance drops below the
 * configured minimum (or the RPC call itself fails).
 */
async function main() {
  try {
    const account = web3.eth.accounts.privateKeyToAccount(privateKey);
    const balance = await web3.eth.getBalance(account.address);
    const hasEnoughFunds = toBN(balance).gte(toBN(minimumBalance));
    if (!hasEnoughFunds) {
      throw new Error(`Not enough balance, less than ${fromWei(minimumBalance)} ETH`);
    }
    await redis.hset('health', { status: true, error: '' });
  } catch (e) {
    console.error('healthWatcher', e.message);
    await redis.hset('health', { status: false, error: e.message });
  }
}

setSafeInterval(main, 30 * 1000);

View File

@@ -1,46 +0,0 @@
const Redis = require('ioredis');
const { redisUrl, offchainOracleAddress, oracleRpcUrl } = require('./config');
const { getArgsForOracle, setSafeInterval } = require('./utils');
const { toChecksumAddress } = require('web3-utils');
const redis = new Redis(redisUrl);
const Web3 = require('web3');
const web3 = new Web3(
new Web3.providers.HttpProvider(oracleRpcUrl, {
timeout: 200000, // ms
}),
);
const offchainOracleABI = require('../abis/OffchainOracle.abi.json');
const offchainOracle = new web3.eth.Contract(offchainOracleABI, offchainOracleAddress);
const { tokenAddresses, oneUintAmount, currencyLookup } = getArgsForOracle();
const { toBN } = require('web3-utils');
/**
 * Price watcher loop: polls the 1inch offchain oracle for each configured
 * token's rate to ETH, scales it to one whole token unit, and caches the
 * resulting table in the redis 'prices' hash. Per-token failures are
 * logged and skipped so one bad token cannot block the rest.
 */
async function main() {
  try {
    const ethPrices = {};
    const ethDecimals = toBN(10).pow(toBN(18)); // eth decimals
    const cDaiAddress = toChecksumAddress('0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643');
    for (const [i, address] of tokenAddresses.entries()) {
      try {
        // cDAI needs the oracle's "wrapped" rate.
        const isWrap = toChecksumAddress(address) === cDaiAddress;
        const rate = await offchainOracle.methods.getRateToEth(address, isWrap).call();
        const priceFormatted = toBN(rate).mul(toBN(oneUintAmount[i])).div(ethDecimals);
        ethPrices[currencyLookup[address]] = priceFormatted.toString();
      } catch (e) {
        console.error('cant get price of ', address);
      }
    }
    await redis.hmset('prices', ethPrices);
    console.log('Wrote following prices to redis', ethPrices);
  } catch (e) {
    console.error('priceWatcher error', e);
  }
}

setSafeInterval(main, 30 * 1000);

View File

@@ -1,57 +0,0 @@
const { v4: uuid } = require('uuid');
const Queue = require('bull');
const Redis = require('ioredis');
const { redisUrl } = require('./config');
const { status } = require('./types');
// Shared redis connection used for the uuid -> bull-id index below.
const redis = new Redis(redisUrl);
// Bull queue holding proof jobs; timings are tuned for long-running proof
// generation (5-minute lock, renewed every 30s).
const queue = new Queue('proofs', redisUrl, {
  lockDuration: 300000, // Key expiration time for job locks.
  lockRenewTime: 30000, // Interval on which to acquire the job lock
  stalledInterval: 30000, // How often check for stalled jobs (use 0 for never checking).
  maxStalledCount: 3, // Max amount of times a stalled job will be re-processed.
  guardInterval: 5000, // Poll interval for delayed jobs and added jobs.
  retryProcessDelay: 5000, // delay before processing next job in case of internal error.
  drainDelay: 5, // A timeout for when the queue is in drained state (empty waiting for jobs).
});
/**
 * Adds a proof job to the bull queue and records a public-uuid -> bull-id
 * mapping in redis so the job can later be looked up by uuid.
 * @returns the public uuid assigned to the job.
 */
async function postJob({ type, request }) {
  const publicId = uuid();
  const payload = {
    id: publicId,
    type,
    status: status.QUEUED,
    ...request, // proof, args, ?contract
  };
  const options = {
    //removeOnComplete: true
  };
  const job = await queue.add(payload, options);
  await redis.set(`job:${publicId}`, job.id);
  return publicId;
}
/**
 * Resolves a public job uuid to the underlying bull job (or undefined).
 * Parameter renamed from `uuid`, which shadowed the imported uuid()
 * generator from the 'uuid' package.
 */
async function getJob(jobUuid) {
  const id = await redis.get(`job:${jobUuid}`);
  return queue.getJobFromId(id);
}
async function getJobStatus(uuid) {
const job = await getJob(uuid);
if (!job) {
return null;
}
return {
...job.data,
failedReason: job.failedReason,
};
}
module.exports = {
postJob,
getJob,
getJobStatus,
queue,
};

View File

@@ -1,30 +0,0 @@
const { httpRpcUrl, aggregatorAddress } = require('./config');
const Web3 = require('web3');
const web3 = new Web3(httpRpcUrl);
const aggregator = new web3.eth.Contract(require('../abis/Aggregator.abi.json'), aggregatorAddress);
const ens = require('eth-ens-namehash');
/**
 * Resolves ENS domains to addresses via the on-chain Aggregator contract,
 * memoizing results so each domain is only resolved once per process.
 */
class ENSResolver {
  constructor() {
    // domain -> resolved address cache
    this.addresses = {};
  }

  /**
   * Resolves one domain (string) or several (array of strings).
   * @returns a single address for single input, an array otherwise.
   */
  async resolve(domains) {
    if (!Array.isArray(domains)) {
      domains = [domains];
    }
    const unresolved = domains.filter(d => !this.addresses[d]);
    if (unresolved.length) {
      const resolved = await aggregator.methods.bulkResolve(unresolved.map(ens.hash)).call();
      // Bug fix: results must be keyed by the *unresolved* subset. The
      // original indexed `domains`, so whenever some domains were already
      // cached the fresh results were written under the wrong keys.
      for (let i = 0; i < resolved.length; i++) {
        this.addresses[unresolved[i]] = resolved[i];
      }
    }
    const addresses = domains.map(domain => this.addresses[domain]);
    return addresses.length === 1 ? addresses[0] : addresses;
  }
}
module.exports = ENSResolver;

View File

@@ -1,41 +0,0 @@
const queue = require('./queue')
const { netId, tornadoServiceFee, miningServiceFee, instances, redisUrl, rewardAccount } = require('./config')
const { version } = require('../package.json')
const Redis = require('ioredis')
const redis = new Redis(redisUrl)
/**
 * Status endpoint consumed by relayer UIs: reports the reward account,
 * supported instances, cached token prices, watcher health, fees, version
 * and the number of jobs currently waiting in the queue.
 */
async function status(req, res) {
  const ethPrices = await redis.hgetall('prices')
  const health = await redis.hgetall('health')
  const jobCounts = await queue.queue.getJobCounts()
  const currentQueue = jobCounts.waiting
  res.json({
    rewardAccount,
    instances: instances[`netId${netId}`],
    netId,
    ethPrices,
    tornadoServiceFee,
    miningServiceFee,
    version,
    health,
    currentQueue,
  })
}
/**
 * Root endpoint: a small HTML pointer to the relayer status endpoint.
 */
function index(req, res) {
  const body =
    'This is <a href=https://tornado.cash>tornado.cash</a> Relayer service. Check the <a href=/v1/status>/status</a> for settings'
  res.send(body)
}
async function getJob(req, res) {
const status = await queue.getJobStatus(req.params.id)
return status ? res.json(status) : res.status(400).json({ error: "The job doesn't exist" })
}
module.exports = {
status,
index,
getJob,
}

View File

@@ -1,146 +0,0 @@
const MerkleTree = require('fixed-merkle-tree');
const { redisUrl, wsRpcUrl, minerMerkleTreeHeight, torn, netId } = require('./config');
const { poseidonHash2 } = require('./utils');
const { toBN } = require('web3-utils');
const Redis = require('ioredis');
const redis = new Redis(redisUrl);
const ENSResolver = require('./resolver');
const resolver = new ENSResolver();
const Web3 = require('web3');
const web3 = new Web3(
new Web3.providers.WebsocketProvider(wsRpcUrl, {
clientConfig: {
maxReceivedFrameSize: 100000000,
maxReceivedMessageSize: 100000000,
},
}),
);
const MinerABI = require('../abis/mining.abi.json');
// Miner contract instance; assigned in init() after ENS resolution.
let contract;
// eslint-disable-next-line no-unused-vars
// In-memory account merkle tree plus the two live web3 subscriptions.
let tree, eventSubscription, blockSubscription;
/**
 * Fetches all NewAccount events in [fromBlock, toBlock], recursively
 * bisecting the range when the node rejects the query (e.g. too many
 * results) and concatenating the halves in block order.
 * @throws once a failing range can no longer be meaningfully split.
 */
async function fetchEvents(fromBlock, toBlock) {
  if (fromBlock > toBlock) {
    return [];
  }
  try {
    return await contract.getPastEvents('NewAccount', { fromBlock, toBlock });
  } catch (error) {
    const midBlock = (fromBlock + toBlock) >> 1;
    if (midBlock - fromBlock < 2) {
      throw new Error(`error fetching events: ${error.message}`);
    }
    const lowerHalf = await fetchEvents(fromBlock, midBlock);
    const upperHalf = await fetchEvents(midBlock + 1, toBlock);
    return lowerHalf.concat(upperHalf);
  }
}
/**
 * Subscription callback for NewAccount events. Keeps the in-memory tree in
 * sync with the chain: appends the commitment when the event index is the
 * next leaf, overwrites the last leaf when the event re-emits the current
 * tip, and otherwise exits the process (rebuild) so the tree is rebuilt
 * from scratch on restart.
 */
async function processNewEvent(err, event) {
  if (err) {
    // Subscription errors are fatal by design — caught by the
    // unhandledRejection handler at the bottom of this file.
    throw new Error(`Event handler error: ${err}`);
    // console.error(err)
    // return
  }
  console.log(
`New account event
Index: ${event.returnValues.index}
Commitment: ${event.returnValues.commitment}
Nullifier: ${event.returnValues.nullifier}
EncAcc: ${event.returnValues.encryptedAccount}`,
  );
  const { commitment, index } = event.returnValues;
  if (tree.elements().length === Number(index)) {
    // Event is exactly the next leaf — append.
    tree.insert(toBN(commitment));
    await updateRedis();
  } else if (tree.elements().length === Number(index) + 1) {
    // Duplicate/replacement of the current tip (e.g. chain reorg).
    console.log('Replacing element', index);
    tree.update(index, toBN(commitment));
    await updateRedis();
  } else {
    // Gap detected — local state can't be trusted; force a full rebuild.
    console.log(`Invalid element index ${index}, rebuilding tree`);
    rebuild();
  }
}
/**
 * newBlockHeaders subscription callback: re-syncs the redis copy of the
 * tree against the on-chain root on every block. Throws on subscription
 * errors (fatal via the unhandledRejection handler below).
 */
async function processNewBlock(err) {
  if (err) {
    throw new Error(`Event handler error: ${err}`);
    // console.error(err)
    // return
  }
  // what if updateRedis takes more than 15 sec?
  await updateRedis();
}
/**
 * Persists the serialized tree and its root to redis and publishes a
 * 'treeUpdate' notification — but only when (a) the local root matches the
 * on-chain root and (b) redis does not already hold this root. A local /
 * on-chain mismatch triggers rebuild() (process exit).
 */
async function updateRedis() {
  const rootOnContract = await contract.methods.getLastAccountRoot().call();
  const localRoot = tree.root();
  if (!localRoot.eq(toBN(rootOnContract))) {
    console.log(`Invalid tree root: ${tree.root()} != ${toBN(rootOnContract)}, rebuilding tree`);
    rebuild();
    return;
  }
  const rootInRedis = await redis.get('tree:root');
  const redisIsCurrent = Boolean(rootInRedis) && localRoot.eq(toBN(rootInRedis));
  if (redisIsCurrent) {
    console.log('Tree in redis is up to date, skipping update');
    return;
  }
  await redis.set('tree:elements', JSON.stringify(tree.serialize()));
  await redis.set('tree:root', localRoot.toString());
  await redis.publish('treeUpdate', localRoot.toString());
  console.log('Updated tree in redis, new root:', localRoot.toString());
}
// In-process rebuilding proved unreliable (see commented attempt below);
// exiting lets the process supervisor restart the watcher, which rebuilds
// the tree from cache + chain in init().
function rebuild() {
  process.exit(1);
  // await eventSubscription.unsubscribe()
  // await blockSubscription.unsubscribe()
  // setTimeout(init, 3000)
}
/**
 * Bootstraps the tree watcher: resolves the miner contract via ENS, loads
 * commitments cached in the repo, fetches any newer NewAccount events from
 * the chain, rebuilds the merkle tree, pushes it to redis, and subscribes
 * to future events and new blocks.
 */
async function init() {
  try {
    console.log('Initializing');
    const miner = await resolver.resolve(torn.miningV2.address);
    contract = new web3.eth.Contract(MinerABI, miner);
    // Cached events ship with the repo; assumed ordered by block — TODO confirm.
    const cachedEvents = require(`../cache/accounts_farmer_${netId}.json`);
    const cachedCommitments = cachedEvents.map(e => toBN(e.commitment));
    const toBlock = await web3.eth.getBlockNumber();
    const [{ blockNumber: fromBlock }] = cachedEvents.slice(-1);
    const newEvents = await fetchEvents(fromBlock + 1, toBlock);
    const newCommitments = newEvents
      .sort((a, b) => a.returnValues.index - b.returnValues.index)
      .map(e => toBN(e.returnValues.commitment))
      // NOTE(review): `item !== arr[index - 1]` compares BN *object identity*,
      // not value, so this dedupe likely never removes anything — confirm.
      .filter((item, index, arr) => !index || item !== arr[index - 1]);
    const commitments = cachedCommitments.concat(newCommitments);
    tree = new MerkleTree(minerMerkleTreeHeight, commitments, { hashFunction: poseidonHash2 });
    await updateRedis();
    console.log(`Rebuilt tree with ${commitments.length} elements, root: ${tree.root()}`);
    // Subscribe only from the block after the snapshot we just built.
    eventSubscription = contract.events.NewAccount({ fromBlock: toBlock + 1 }, processNewEvent);
    blockSubscription = web3.eth.subscribe('newBlockHeaders', processNewBlock);
  } catch (e) {
    console.error('error on init treeWatcher', e.message);
  }
}
init();

// Any unhandled rejection (e.g. thrown from the subscription callbacks
// above) is fatal so the supervisor restarts the watcher cleanly.
process.on('unhandledRejection', error => {
  console.error('Unhandled promise rejection', error);
  process.exit(1);
});

View File

@@ -1,139 +0,0 @@
const { instances, netId } = require('./config');
const { poseidon } = require('circomlib');
const { toBN, toChecksumAddress, BN } = require('web3-utils');
// Extra (non-instance) tokens the price oracle should track.
const TOKENS = {
  torn: {
    tokenAddress: '0x77777FeDdddFfC19Ff86DB637967013e6C6A116C',
    symbol: 'TORN',
    decimals: 18,
  },
};
// `${netId}_${address}` -> { currency, amount, symbol, decimals } lookup,
// built once at load for every configured instance on the active network.
const addressMap = new Map();
const instance = instances[`netId${netId}`];
for (const [currency, { instanceAddress, symbol, decimals }] of Object.entries(instance)) {
  Object.entries(instanceAddress).forEach(([amount, address]) =>
    addressMap.set(`${netId}_${address}`, {
      currency,
      amount,
      symbol,
      decimals,
    }),
  );
}
// Resolves after the given number of milliseconds.
const sleep = (ms) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
/**
 * Maps a tornado instance address to its { currency, amount, symbol,
 * decimals } metadata for the active network.
 *
 * Fixes: (1) the original used ESM `export function` in a CommonJS module
 * (the file already exports it via module.exports below, and `export`
 * breaks require()); (2) typo 'contact' -> 'contract' in the error.
 * @throws when the address is not a configured instance.
 */
function getInstance(address) {
  const key = `${netId}_${toChecksumAddress(address)}`;
  if (!addressMap.has(key)) {
    throw new Error('Unknown contract address');
  }
  return addressMap.get(key);
}
// Poseidon hash helpers (circomlib) returned as BN; poseidonHash2 is the
// two-input variant used as the merkle tree hash function.
const poseidonHash = items => toBN(poseidon(items).toString());
const poseidonHash2 = (a, b) => poseidonHash([a, b]);
/**
 * Runs `func` and — whether it resolves or rejects — schedules the next
 * run `interval` ms after completion, so runs never overlap (unlike
 * setInterval). Rejections are logged and do not stop the loop.
 */
function setSafeInterval(func, interval) {
  const scheduleNext = () => {
    setTimeout(() => setSafeInterval(func, interval), interval);
  };
  func().catch(console.error).finally(scheduleNext);
}
/**
 * A promise that resolves with the payload of the first `event` emitted by
 * `source`, and rejects with the payload of its first 'error' event.
 */
function when(source, event) {
  return new Promise((resolve, reject) => {
    source.once(event, resolve);
    source.on('error', reject);
  });
}
/**
 * Builds the parallel arrays the price oracle needs: token addresses, the
 * raw amount of one whole token (10^decimals, as string), and an
 * address -> currency lookup. 'eth' is skipped since prices are quoted in
 * ETH already.
 *
 * Fix: the original used `.map` purely for side effects and discarded its
 * result — `forEach` expresses the intent.
 */
function getArgsForOracle() {
  const tokens = {
    ...instances.netId1,
    ...TOKENS,
  };
  const tokenAddresses = [];
  const oneUintAmount = [];
  const currencyLookup = {};
  Object.entries(tokens).forEach(([currency, data]) => {
    if (currency !== 'eth') {
      tokenAddresses.push(data.tokenAddress);
      oneUintAmount.push(toBN('10').pow(toBN(data.decimals.toString())).toString());
      currencyLookup[data.tokenAddress] = currency;
    }
  });
  return { tokenAddresses, oneUintAmount, currencyLookup };
}
/**
 * Converts a decimal amount (string/number) into base units as a BN given
 * `decimals`, e.g. fromDecimals('1.5', 18) -> 1500000000000000000.
 * Port of ethjs-unit's toWei generalized to arbitrary decimals.
 * @throws on malformed values, too many decimal points or decimal places.
 */
function fromDecimals(value, decimals) {
  let ether = value.toString();
  const base = new BN('10').pow(new BN(decimals));
  const baseLength = base.toString(10).length - 1 || 1;
  const negative = ether.substring(0, 1) === '-';
  if (negative) {
    ether = ether.substring(1);
  }
  if (ether === '.') {
    throw new Error('[ethjs-unit] while converting number ' + value + ' to wei, invalid value');
  }
  // Split it into a whole and fractional part
  const comps = ether.split('.');
  if (comps.length > 2) {
    throw new Error('[ethjs-unit] while converting number ' + value + ' to wei, too many decimal points');
  }
  let whole = comps[0];
  let fraction = comps[1];
  if (!whole) {
    whole = '0';
  }
  if (!fraction) {
    fraction = '0';
  }
  if (fraction.length > baseLength) {
    throw new Error('[ethjs-unit] while converting number ' + value + ' to wei, too many decimal places');
  }
  // Right-pad the fraction so it lines up with the base's magnitude.
  while (fraction.length < baseLength) {
    fraction += '0';
  }
  whole = new BN(whole);
  fraction = new BN(fraction);
  let wei = whole.mul(base).add(fraction);
  if (negative) {
    // Bug fix: the original did `wei.mul(negative)`, multiplying a BN by
    // the boolean `true`, which is not a valid BN operand — negative
    // inputs were broken. Negate instead.
    wei = wei.neg();
  }
  return new BN(wei.toString(10), 10);
}
// Public (CommonJS) API of this utils module.
module.exports = {
  getInstance,
  setSafeInterval,
  poseidonHash2,
  sleep,
  when,
  getArgsForOracle,
  fromDecimals,
};

View File

@@ -1,166 +0,0 @@
const { isAddress, toChecksumAddress } = require('web3-utils');
const { getInstance } = require('./utils');
const { rewardAccount } = require('./config');
const Ajv = require('ajv');
// Reusable JSON-schema fragments. Patterns pin exact hex payload lengths:
// 40 hex chars = 20-byte address, 512 = 256-byte proof, 392 = 196-byte
// encrypted account, 64 = bytes32. isAddress / isKnownContract /
// isFeeRecipient are custom ajv keywords registered below.
const addressType = { type: 'string', pattern: '^0x[a-fA-F0-9]{40}$', isAddress: true };
const proofType = { type: 'string', pattern: '^0x[a-fA-F0-9]{512}$' };
const encryptedAccountType = { type: 'string', pattern: '^0x[a-fA-F0-9]{392}$' };
const bytes32Type = { type: 'string', pattern: '^0x[a-fA-F0-9]{64}$' };
const instanceType = { ...addressType, isKnownContract: true };
const relayerType = { ...addressType, isFeeRecipient: true };
// Tornado withdrawal: proof, instance contract, and exactly 6 positional
// args. args[3] must be this relayer; args[4]/args[5] are read as
// fee/refund by worker.checkTornadoFee.
const tornadoWithdrawSchema = {
  type: 'object',
  properties: {
    proof: proofType,
    contract: instanceType,
    args: {
      type: 'array',
      maxItems: 6,
      minItems: 6,
      items: [bytes32Type, bytes32Type, addressType, relayerType, bytes32Type, bytes32Type],
    },
  },
  additionalProperties: false,
  required: ['proof', 'contract', 'args'],
};
// Mining reward claim: proof plus a structured args object (rates, roots,
// relayer-bound extData, and the account tree update inputs).
const miningRewardSchema = {
  type: 'object',
  properties: {
    proof: proofType,
    args: {
      type: 'object',
      properties: {
        rate: bytes32Type,
        fee: bytes32Type,
        instance: instanceType,
        rewardNullifier: bytes32Type,
        extDataHash: bytes32Type,
        depositRoot: bytes32Type,
        withdrawalRoot: bytes32Type,
        extData: {
          type: 'object',
          properties: {
            relayer: relayerType,
            encryptedAccount: encryptedAccountType,
          },
          additionalProperties: false,
          required: ['relayer', 'encryptedAccount'],
        },
        // Account tree update inputs (same shape in miningWithdrawSchema).
        account: {
          type: 'object',
          properties: {
            inputRoot: bytes32Type,
            inputNullifierHash: bytes32Type,
            outputRoot: bytes32Type,
            outputPathIndices: bytes32Type,
            outputCommitment: bytes32Type,
          },
          additionalProperties: false,
          required: [
            'inputRoot',
            'inputNullifierHash',
            'outputRoot',
            'outputPathIndices',
            'outputCommitment',
          ],
        },
      },
      additionalProperties: false,
      required: [
        'rate',
        'fee',
        'instance',
        'rewardNullifier',
        'extDataHash',
        'depositRoot',
        'withdrawalRoot',
        'extData',
        'account',
      ],
    },
  },
  additionalProperties: false,
  required: ['proof', 'args'],
};
// Mining withdrawal: proof plus amount/extData/account tree update inputs.
const miningWithdrawSchema = {
  type: 'object',
  properties: {
    proof: proofType,
    args: {
      type: 'object',
      properties: {
        amount: bytes32Type,
        extDataHash: bytes32Type,
        extData: {
          type: 'object',
          properties: {
            fee: bytes32Type,
            recipient: addressType,
            relayer: relayerType,
            encryptedAccount: encryptedAccountType,
          },
          additionalProperties: false,
          required: ['fee', 'relayer', 'encryptedAccount', 'recipient'],
        },
        account: {
          type: 'object',
          properties: {
            inputRoot: bytes32Type,
            inputNullifierHash: bytes32Type,
            outputRoot: bytes32Type,
            outputPathIndices: bytes32Type,
            outputCommitment: bytes32Type,
          },
          additionalProperties: false,
          required: [
            'inputRoot',
            'inputNullifierHash',
            'outputRoot',
            'outputPathIndices',
            'outputCommitment',
          ],
        },
      },
      additionalProperties: false,
      required: ['amount', 'extDataHash', 'extData', 'account'],
    },
  },
  additionalProperties: false,
  required: ['proof', 'args'],
};
const validateTornadoWithdraw = ajv.compile(tornadoWithdrawSchema);
const validateMiningReward = ajv.compile(miningRewardSchema);
const validateMiningWithdraw = ajv.compile(miningWithdrawSchema);
function getInputError(validator, data) {
validator(data);
if (validator.errors) {
const error = validator.errors[0];
return `${error.dataPath} ${error.message}`;
}
return null;
}
function getTornadoWithdrawInputError(data) {
return getInputError(validateTornadoWithdraw, data);
}
function getMiningRewardInputError(data) {
return getInputError(validateMiningReward, data);
}
function getMiningWithdrawInputError(data) {
return getInputError(validateMiningWithdraw, data);
}
module.exports = {
getTornadoWithdrawInputError,
getMiningRewardInputError,
getMiningWithdrawInputError,
};

View File

@@ -1,350 +0,0 @@
const fs = require('fs');
const Web3 = require('web3');
const { toBN, toWei, fromWei, toChecksumAddress } = require('web3-utils');
const MerkleTree = require('fixed-merkle-tree');
const Redis = require('ioredis');
const { GasPriceOracle } = require('gas-price-oracle');
const { Utils, Controller } = require('tornado-anonymity-mining');
// const swapABI = require('../abis/swap.abi.json');
const miningABI = require('../abis/mining.abi.json');
const tornadoABI = require('../abis/tornadoABI.json');
const tornadoProxyABI = require('../abis/tornadoProxyABI.json');
const aggregatorAbi = require('../abis/Aggregator.abi.json');
const { queue } = require('./queue');
const { poseidonHash2, getInstance, fromDecimals, sleep } = require('./utils');
const { jobType, status } = require('./constants');
const {
torn,
netId,
redisUrl,
gasLimits,
instances,
privateKey,
httpRpcUrl,
oracleRpcUrl,
baseFeeReserve,
miningServiceFee,
tornadoServiceFee,
tornadoGoerliProxy,
governanceAddress,
aggregatorAddress,
} = require('./config');
const ENSResolver = require('./resolver');
const resolver = new ENSResolver();
const { TxManager } = require('tx-manager');
// Mutable worker state shared across the processing pipeline below.
let web3;
let currentTx; // tx-manager transaction currently in flight
let currentJob; // bull job currently being processed
let tree; // account merkle tree, loaded by fetchTree() for mining jobs
let txManager;
let controller; // NOTE(review): never assigned — its init is commented out in start(); confirm
let swap; // NOTE(review): never assigned; only referenced by the commented-out checkMiningFee
let minerContract; // NOTE(review): never assigned but used in getTxObject's mining branch — confirm
const redis = new Redis(redisUrl);
const redisSubscribe = new Redis(redisUrl);
const gasPriceOracle = new GasPriceOracle({ defaultRpc: oracleRpcUrl });
/**
 * Loads the serialized account tree from redis and, when the current
 * mining job's pending tx was built against an outdated account root,
 * regenerates the tree-update proof and replaces the pending tx in flight.
 */
async function fetchTree() {
  const elements = await redis.get('tree:elements');
  // JSON reviver: turn stringified field values back into BN instances.
  const convert = (_, val) => (typeof val === 'string' ? toBN(val) : val);
  tree = MerkleTree.deserialize(JSON.parse(elements, convert), poseidonHash2);
  if (currentTx && currentJob && ['MINING_REWARD', 'MINING_WITHDRAW'].includes(currentJob.data.type)) {
    const { proof, args } = currentJob.data;
    if (toBN(args.account.inputRoot).eq(toBN(tree.root()))) {
      console.log('Account root is up to date. Skipping Root Update operation...');
      return;
    } else {
      console.log('Account root is outdated. Starting Root Update operation...');
    }
    // NOTE(review): `controller` is only initialized by code commented out
    // in start(); this path would throw if reached — confirm.
    const update = await controller.treeUpdate(args.account.outputCommitment, tree);
    const minerAddress = await resolver.resolve(torn.miningV2.address);
    const instance = new web3.eth.Contract(miningABI, minerAddress);
    const data =
      currentJob.data.type === 'MINING_REWARD'
        ? instance.methods.reward(proof, args, update.proof, update.args).encodeABI()
        : instance.methods.withdraw(proof, args, update.proof, update.args).encodeABI();
    await currentTx.replace({
      to: minerAddress,
      data,
    });
    console.log('replaced pending tx');
  }
}
/**
 * Worker entry point: wires up web3, the transaction manager, and attaches
 * processJob as the bull queue processor. Startup errors are logged but do
 * not exit the process.
 */
async function start() {
  try {
    web3 = new Web3(httpRpcUrl);
    const { CONFIRMATIONS, MAX_GAS_PRICE } = process.env;
    txManager = new TxManager({
      privateKey,
      rpcUrl: httpRpcUrl,
      config: {
        CONFIRMATIONS,
        MAX_GAS_PRICE,
        THROW_ON_REVERT: false,
        BASE_FEE_RESERVE_PERCENTAGE: baseFeeReserve,
      },
    });
    // Mining proving keys — currently unused because the Controller init
    // below is commented out.
    const provingKeys = {
      treeUpdateCircuit: require('../keys/TreeUpdate.json'),
      treeUpdateProvingKey: fs.readFileSync('./keys/TreeUpdate_proving_key.bin').buffer,
    };
    // controller = new Controller({ provingKeys });
    // await controller.init();
    queue.process(processJob);
    console.log('Worker started');
  } catch (e) {
    console.error('error on start worker', e.message);
  }
}
/**
 * Dispatches fee validation by job type. Mining fee checks are currently
 * disabled (see the commented-out checkMiningFee below), so non-withdraw
 * jobs pass through without a fee check.
 */
function checkFee({ data }) {
  const isTornadoWithdraw = data.type === jobType.TORNADO_WITHDRAW;
  if (!isTornadoWithdraw) {
    // return checkMiningFee(data);
    return undefined;
  }
  return checkTornadoFee(data);
}
/**
 * Current gas price as a BN in wei: the latest block's baseFeePerGas on
 * EIP-1559 networks, otherwise the oracle's 'fast' price.
 */
async function getGasPrice() {
  const block = await web3.eth.getBlock('latest');
  if (block && block.baseFeePerGas) {
    return toBN(block.baseFeePerGas);
  }
  const oraclePrices = await gasPriceOracle.gasPrices();
  return toBN(toWei(oraclePrices.fast.toString(), 'gwei'));
}
/**
 * Verifies that the fee inside a tornado withdrawal covers the gas expense
 * plus the relayer's percentage service fee. For non-ETH instances the gas
 * expense and refund are converted to token units via the redis-cached
 * price.
 * @throws when the provided fee (args[4]) is below the computed minimum.
 */
async function checkTornadoFee({ args, contract }) {
  const { currency, amount } = getInstance(contract);
  const { decimals } = instances[`netId${netId}`][currency];
  // args[4] = fee, args[5] = refund.
  const [fee, refund] = [args[4], args[5]].map(toBN);
  const gasPrice = await getGasPrice();
  const ethPrice = await redis.hget('prices', currency);
  const expense = gasPrice.mul(toBN(gasLimits[jobType.TORNADO_WITHDRAW]));
  // Service fee as a fraction of the denomination; the 1e10 scaling keeps
  // precision for fractional percentages.
  const feePercent = toBN(fromDecimals(amount, decimals))
    .mul(toBN(parseInt(tornadoServiceFee * 1e10)))
    .div(toBN(1e10 * 100));
  let desiredFee;
  switch (currency) {
    case 'eth': {
      desiredFee = expense.add(feePercent);
      break;
    }
    default: {
      // NOTE(review): ethPrice comes from redis and could be missing for a
      // newly added token; toBN(null) would throw here — confirm handling.
      desiredFee = expense
        .add(refund)
        .mul(toBN(10 ** decimals))
        .div(toBN(ethPrice));
      desiredFee = desiredFee.add(feePercent);
      break;
    }
  }
  console.log(
    'sent fee, desired fee, feePercent',
    fromWei(fee.toString()),
    fromWei(desiredFee.toString()),
    fromWei(feePercent.toString()),
  );
  if (fee.lt(desiredFee)) {
    throw new Error('Provided fee is not enough. Probably it is a Gas Price spike, try to resubmit.');
  }
}
// async function checkMiningFee({ args }) {
// const gasPrice = await getGasPrice();
// const ethPrice = await redis.hget('prices', 'torn');
// const isMiningReward = currentJob.data.type === jobType.MINING_REWARD;
// const providedFee = isMiningReward ? toBN(args.fee) : toBN(args.extData.fee);
//
// const expense = gasPrice.mul(toBN(gasLimits[currentJob.data.type]));
// const expenseInTorn = expense.mul(toBN(1e18)).div(toBN(ethPrice));
// // todo make aggregator for ethPrices and rewardSwap data
// const balance = await swap.methods.tornVirtualBalance().call();
// const poolWeight = await swap.methods.poolWeight().call();
// const expenseInPoints = Utils.reverseTornadoFormula({ balance, tokens: expenseInTorn, poolWeight });
// /* eslint-disable */
// const serviceFeePercent = isMiningReward
// ? toBN(0)
// : toBN(args.amount)
// .sub(providedFee) // args.amount includes fee
// .mul(toBN(parseInt(miningServiceFee * 1e10)))
// .div(toBN(1e10 * 100))
// /* eslint-enable */
// const desiredFee = expenseInPoints.add(serviceFeePercent); // in points
// console.log(
// 'user provided fee, desired fee, serviceFeePercent',
// providedFee.toString(),
// desiredFee.toString(),
// serviceFeePercent.toString(),
// );
// if (toBN(providedFee).lt(desiredFee)) {
// throw new Error('Provided fee is not enough. Probably it is a Gas Price spike, try to resubmit.');
// }
// }
/**
 * Resolves the tornado proxy contract: a hard-coded address on Goerli
 * (netId 5), ENS-resolved everywhere else. Also reports whether the proxy
 * is the legacy one that requires direct-instance ERC-20 withdrawals.
 */
async function getProxyContract() {
  const proxyAddress =
    netId === 5 ? tornadoGoerliProxy : await resolver.resolve(torn.tornadoRouter.address);
  return {
    contract: new web3.eth.Contract(tornadoProxyABI, proxyAddress),
    isOldProxy: checkOldProxy(proxyAddress),
  };
}
// True when `address` is the legacy proxy contract (checksum-insensitive
// comparison).
function checkOldProxy(address) {
  const legacyProxy = toChecksumAddress('0x905b63Fff465B9fFBF41DeA908CEb12478ec7601');
  return toChecksumAddress(address) === legacyProxy;
}
/**
 * Builds the raw transaction for a job. Tornado withdrawals go through the
 * proxy — except ERC-20 withdrawals on the legacy proxy, which must call
 * the instance directly. Mining jobs call reward/withdraw on the miner.
 */
async function getTxObject({ data }) {
  if (data.type === jobType.TORNADO_WITHDRAW) {
    let { contract, isOldProxy } = await getProxyContract();
    let calldata = contract.methods.withdraw(data.contract, data.proof, ...data.args).encodeABI();
    if (isOldProxy && getInstance(data.contract).currency !== 'eth') {
      // Legacy proxy can't forward token withdrawals — hit the instance.
      contract = new web3.eth.Contract(tornadoABI, data.contract);
      calldata = contract.methods.withdraw(data.proof, ...data.args).encodeABI();
    }
    return {
      // args[5] is the refund, sent along as tx value.
      value: data.args[5],
      to: contract._address,
      data: calldata,
      gasLimit: gasLimits['WITHDRAW_WITH_EXTRA'],
    };
  } else {
    const method = data.type === jobType.MINING_REWARD ? 'reward' : 'withdraw';
    // NOTE(review): minerContract is declared at the top of this file but
    // never assigned anywhere — this branch would throw; confirm.
    const calldata = minerContract.methods[method](data.proof, data.args).encodeABI();
    return {
      to: minerContract._address,
      data: calldata,
      gasLimit: gasLimits[data.type],
    };
  }
}
/**
 * Re-simulates a mined-but-failed transaction via eth_call at its block to
 * recover the revert reason. Returns true when the failure was an outdated
 * merkle root, i.e. the job is retryable after refreshing the tree.
 */
async function isOutdatedTreeRevert(receipt, currentTx) {
  try {
    await web3.eth.call(currentTx.tx, receipt.blockNumber);
    console.log('Simulated call successful');
    return false;
  } catch (e) {
    console.log('Decoded revert reason:', e.message);
    const outdatedReasons = ['Outdated account merkle root', 'Outdated tree update merkle root'];
    return outdatedReasons.some(reason => e.message.indexOf(reason) !== -1);
  }
}
/**
 * Bull queue processor: validates the job type, marks the job ACCEPTED,
 * and submits its transaction. Re-throws after marking FAILED so bull
 * records the failure.
 *
 * Fix: `currentJob` is now assigned before any throw — previously an
 * unknown job type threw before the assignment, so the FAILED status
 * update in the catch block hit a stale (or undefined) currentJob.
 */
async function processJob(job) {
  currentJob = job;
  try {
    if (!jobType[job.data.type]) {
      throw new Error(`Unknown job type: ${job.data.type}`);
    }
    await updateStatus(status.ACCEPTED);
    console.log(`Start processing a new ${job.data.type} job #${job.id}`);
    await submitTx(job);
  } catch (e) {
    console.error('processJob', e.message);
    await updateStatus(status.FAILED);
    throw e;
  }
}
/**
 * Fee-checks, builds, and broadcasts the job's transaction, mirroring tx
 * lifecycle events into the job's status. Mining jobs that fail with an
 * outdated-merkle-root revert are retried after a tree refresh: up to 3
 * times when the failure is detected post-mine, up to 5 when the send
 * itself rejects.
 */
async function submitTx(job, retry = 0) {
  await checkFee(job);
  currentTx = await txManager.createTx(await getTxObject(job));
  // Mining jobs depend on the account tree; refresh (and possibly replace
  // the pending tx) before sending.
  if (job.data.type !== jobType.TORNADO_WITHDRAW) {
    await fetchTree();
  }
  try {
    const receipt = await currentTx
      .send()
      .on('transactionHash', txHash => {
        updateTxHash(txHash);
        updateStatus(status.SENT);
      })
      .on('mined', receipt => {
        console.log('Mined in block', receipt.blockNumber);
        updateStatus(status.MINED);
      })
      .on('confirmations', updateConfirmations);
    if (receipt.status === 1) {
      await updateStatus(status.CONFIRMED);
    } else {
      // Mined but reverted: retry only for mining jobs whose revert decodes
      // to an outdated tree root.
      if (job.data.type !== jobType.TORNADO_WITHDRAW && (await isOutdatedTreeRevert(receipt, currentTx))) {
        if (retry < 3) {
          await updateStatus(status.RESUBMITTED);
          await submitTx(job, retry + 1);
        } else {
          throw new Error('Tree update retry limit exceeded');
        }
      } else {
        throw new Error('Submitted transaction failed');
      }
    }
  } catch (e) {
    // todo this could result in duplicated error logs
    // todo handle a case where account tree is still not up to date (wait and retry)?
    if (
      job.data.type !== jobType.TORNADO_WITHDRAW &&
      (e.message.indexOf('Outdated account merkle root') !== -1 ||
        e.message.indexOf('Outdated tree update merkle root') !== -1)
    ) {
      if (retry < 5) {
        await sleep(3000);
        console.log('Tree is still not up to date, resubmitting');
        await submitTx(job, retry + 1);
      } else {
        throw new Error('Tree update retry limit exceeded');
      }
    } else {
      throw new Error(`Revert by smart contract ${e.message}`);
    }
  }
}
// Records the hash of the most recently broadcast tx on the current job.
async function updateTxHash(txHash) {
  console.log(`A new successfully sent tx ${txHash}`);
  const { data } = currentJob;
  data.txHash = txHash;
  await currentJob.update(data);
}
// Records the current confirmation count on the current job.
async function updateConfirmations(confirmations) {
  console.log(`Confirmations count ${confirmations}`);
  const { data } = currentJob;
  data.confirmations = confirmations;
  await currentJob.update(data);
}
/**
 * Persists a new status on the current job. Parameter renamed from
 * `status`, which shadowed the status enum imported from './constants'.
 */
async function updateStatus(newStatus) {
  console.log(`Job status updated ${newStatus}`);
  currentJob.data.status = newStatus;
  await currentJob.update(currentJob.data);
}

start();

1337
yarn.lock

File diff suppressed because it is too large Load Diff