diff --git a/CHANGELOG.md b/CHANGELOG.md index f6d821b36..0add13fc8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [9.5.3] +### Fixed +- [#11103](https://github.com/MetaMask/metamask-extension/pull/11103): Fixes bug that made MetaMask unusable and displayed 'Minified React error #130' on certain networks and accounts +- [#11015](https://github.com/MetaMask/metamask-extension/pull/11015): Prevent big number error when attempting to view transaction list + ## [9.5.2] ### Fixed - [#11071](https://github.com/MetaMask/metamask-extension/pull/11071): Fixing address entry error when sending a transaction on a custom network @@ -2230,7 +2235,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Uncategorized - Added the ability to restore accounts from seed words. -[Unreleased]: https://github.com/MetaMask/metamask-extension/compare/v9.5.2...HEAD +[Unreleased]: https://github.com/MetaMask/metamask-extension/compare/v9.5.3...HEAD +[9.5.3]: https://github.com/MetaMask/metamask-extension/compare/v9.5.2...v9.5.3 [9.5.2]: https://github.com/MetaMask/metamask-extension/compare/v9.5.1...v9.5.2 [9.5.1]: https://github.com/MetaMask/metamask-extension/compare/v9.5.0...v9.5.1 [9.5.0]: https://github.com/MetaMask/metamask-extension/compare/v9.4.0...v9.5.0 diff --git a/app/manifest/_base.json b/app/manifest/_base.json index 07664ccd7..bff810ebb 100644 --- a/app/manifest/_base.json +++ b/app/manifest/_base.json @@ -71,6 +71,6 @@ "notifications" ], "short_name": "__MSG_appName__", - "version": "9.5.2", + "version": "9.5.3", "web_accessible_resources": ["inpage.js", "phishing.html"] } diff --git a/app/scripts/controllers/ens/index.js b/app/scripts/controllers/ens/index.js index a7d4c696b..85477ef7e 100644 --- a/app/scripts/controllers/ens/index.js +++ b/app/scripts/controllers/ens/index.js @@ -1,8 +1,8 @@ import punycode from 'punycode/punycode'; -import { toChecksumAddress } from 'ethereumjs-util'; import { ObservableStore } from '@metamask/obs-store'; import log from 'loglevel'; import { CHAIN_ID_TO_NETWORK_ID_MAP } from '../../../../shared/constants/network'; +import { toChecksumHexAddress } from '../../../../shared/modules/hexstring-utils'; import Ens from './ens'; const ZERO_ADDRESS = '0x0000000000000000000000000000000000000000'; @@ -43,7 +43,7 @@ export default class EnsController { } reverseResolveAddress(address) { - return this._reverseResolveAddress(toChecksumAddress(address)); + return this._reverseResolveAddress(toChecksumHexAddress(address)); } async _reverseResolveAddress(address) { @@ -79,7 +79,7 @@ export default class EnsController { return undefined; } - if (toChecksumAddress(registeredAddress) !== address) { + if (toChecksumHexAddress(registeredAddress) !== address) { return undefined; } diff --git a/app/scripts/controllers/preferences.js b/app/scripts/controllers/preferences.js index 9470df50c..95b0e1e4d 100644 --- a/app/scripts/controllers/preferences.js +++ b/app/scripts/controllers/preferences.js @@ -2,12 +2,12 @@ import { strict as assert } from 'assert'; import { ObservableStore } from '@metamask/obs-store'; import { ethErrors } from 'eth-rpc-errors'; import { normalize as normalizeAddress } from 'eth-sig-util'; -import { isValidAddress } from 'ethereumjs-util'; import ethers from 'ethers'; import log from 'loglevel'; import { LISTED_CONTRACT_ADDRESSES } from '../../../shared/constants/tokens'; import { NETWORK_TYPE_TO_ID_MAP } from 
'../../../shared/constants/network'; import { isPrefixedFormattedHexString } from '../../../shared/modules/network.utils'; +import { isValidHexAddress } from '../../../shared/modules/hexstring-utils'; import { NETWORK_EVENTS } from './network'; export default class PreferencesController { @@ -836,7 +836,7 @@ export default class PreferencesController { `Invalid decimals "${decimals}": must be 0 <= 36.`, ); } - if (!isValidAddress(address)) { + if (!isValidHexAddress(address, { allowNonPrefixed: false })) { throw ethErrors.rpc.invalidParams(`Invalid address "${address}".`); } } diff --git a/app/scripts/controllers/token-rates.js b/app/scripts/controllers/token-rates.js index b725b04c9..1bba3f389 100644 --- a/app/scripts/controllers/token-rates.js +++ b/app/scripts/controllers/token-rates.js @@ -1,8 +1,8 @@ import { ObservableStore } from '@metamask/obs-store'; import log from 'loglevel'; import { normalize as normalizeAddress } from 'eth-sig-util'; -import { toChecksumAddress } from 'ethereumjs-util'; import getFetchWithTimeout from '../../../shared/modules/fetch-with-timeout'; +import { toChecksumHexAddress } from '../../../shared/modules/hexstring-utils'; const fetchWithTimeout = getFetchWithTimeout(30000); @@ -45,7 +45,7 @@ export default class TokenRatesController { this._tokens.forEach((token) => { const price = prices[token.address.toLowerCase()] || - prices[toChecksumAddress(token.address)]; + prices[toChecksumHexAddress(token.address)]; contractExchangeRates[normalizeAddress(token.address)] = price ? price[nativeCurrency] : 0; diff --git a/app/scripts/controllers/transactions/lib/util.js b/app/scripts/controllers/transactions/lib/util.js index 70652a3c1..804f43ec2 100644 --- a/app/scripts/controllers/transactions/lib/util.js +++ b/app/scripts/controllers/transactions/lib/util.js @@ -1,7 +1,7 @@ -import { isValidAddress } from 'ethereumjs-util'; import { ethErrors } from 'eth-rpc-errors'; import { addHexPrefix } from '../../../lib/util'; import { TRANSACTION_STATUSES } from '../../../../../shared/constants/transaction'; +import { isValidHexAddress } from '../../../../../shared/modules/hexstring-utils'; const normalizers = { from: (from) => addHexPrefix(from), @@ -110,7 +110,7 @@ export function validateFrom(txParams) { `Invalid "from" address "${txParams.from}": not a string.`, ); } - if (!isValidAddress(txParams.from)) { + if (!isValidHexAddress(txParams.from, { allowNonPrefixed: false })) { throw ethErrors.rpc.invalidParams('Invalid "from" address.'); } } @@ -128,7 +128,10 @@ export function validateRecipient(txParams) { } else { throw ethErrors.rpc.invalidParams('Invalid "to" address.'); } - } else if (txParams.to !== undefined && !isValidAddress(txParams.to)) { + } else if ( + txParams.to !== undefined && + !isValidHexAddress(txParams.to, { allowNonPrefixed: false }) + ) { throw ethErrors.rpc.invalidParams('Invalid "to" address.'); } return txParams; diff --git a/app/scripts/controllers/transactions/tx-state-manager.js b/app/scripts/controllers/transactions/tx-state-manager.js index 9eaa260a1..eed1f6d79 100644 --- a/app/scripts/controllers/transactions/tx-state-manager.js +++ b/app/scripts/controllers/transactions/tx-state-manager.js @@ -187,28 +187,43 @@ export default class TransactionStateManager extends EventEmitter { const transactions = this.getTransactions({ filterToCurrentNetwork: false, }); - const txCount = transactions.length; const { txHistoryLimit } = this; // checks if the length of the tx history is longer then desired persistence // limit and then if it is removes 
the oldest confirmed or rejected tx. // Pending or unapproved transactions will not be removed by this - // operation. + // operation. For safety of presenting a fully functional transaction UI + // representation, this function will not break apart transactions with the + // same nonce, per network. Not accounting for transactions of the same + // nonce and network combo can result in confusing or broken experiences + // in the UI. // // TODO: we are already limiting what we send to the UI, and in the future // we will send UI only collected groups of transactions *per page* so at // some point in the future, this persistence limit can be adjusted. When // we do that I think we should figure out a better storage solution for // transaction history entries. - if (txCount > txHistoryLimit - 1) { - const index = transactions.findIndex((metaTx) => { - return getFinalStates().includes(metaTx.status); - }); - if (index !== -1) { - this._deleteTransaction(transactions[index].id); - } - } + const nonceNetworkSet = new Set(); + const txsToDelete = transactions + .reverse() + .filter((tx) => { + const { nonce } = tx.txParams; + const { chainId, metamaskNetworkId, status } = tx; + const key = `${nonce}-${chainId ?? metamaskNetworkId}`; + if (nonceNetworkSet.has(key)) { + return false; + } else if ( + nonceNetworkSet.size < txHistoryLimit - 1 || + getFinalStates().includes(status) === false + ) { + nonceNetworkSet.add(key); + return false; + } + return true; + }) + .map((tx) => tx.id); + this._deleteTransactions(txsToDelete); this._addTransactionsToState([txMeta]); return txMeta; } @@ -612,4 +627,20 @@ export default class TransactionStateManager extends EventEmitter { transactions, }); } + + /** + * removes multiple transaction from state. This is not intended for external use. + * + * @private + * @param {number[]} targetTransactionIds - the transactions to delete + */ + _deleteTransactions(targetTransactionIds) { + const { transactions } = this.store.getState(); + targetTransactionIds.forEach((transactionId) => { + delete transactions[transactionId]; + }); + this.store.updateState({ + transactions, + }); + } } diff --git a/app/scripts/controllers/transactions/tx-state-manager.test.js b/app/scripts/controllers/transactions/tx-state-manager.test.js index 79c764643..895fb3626 100644 --- a/app/scripts/controllers/transactions/tx-state-manager.test.js +++ b/app/scripts/controllers/transactions/tx-state-manager.test.js @@ -1,8 +1,13 @@ import { strict as assert } from 'assert'; import sinon from 'sinon'; -import { TRANSACTION_STATUSES } from '../../../../shared/constants/transaction'; +import { + TRANSACTION_STATUSES, + TRANSACTION_TYPES, +} from '../../../../shared/constants/transaction'; import { KOVAN_CHAIN_ID, + MAINNET_CHAIN_ID, + RINKEBY_CHAIN_ID, KOVAN_NETWORK_ID, } from '../../../../shared/constants/network'; import TxStateManager from './tx-state-manager'; @@ -10,6 +15,36 @@ import { snapshotFromTxMeta } from './lib/tx-state-history-helpers'; const VALID_ADDRESS = '0x0000000000000000000000000000000000000000'; const VALID_ADDRESS_TWO = '0x0000000000000000000000000000000000000001'; + +function generateTransactions( + numToGen, + { + chainId, + to, + from, + status, + type = TRANSACTION_TYPES.SENT_ETHER, + nonce = (i) => `${i}`, + }, +) { + const txs = []; + for (let i = 0; i < numToGen; i++) { + const tx = { + id: i, + time: new Date() * i, + status: typeof status === 'function' ? status(i) : status, + chainId: typeof chainId === 'function' ? 
chainId(i) : chainId, + txParams: { + nonce: nonce(i), + to, + from, + }, + type: typeof type === 'function' ? type(i) : type, + }; + txs.push(tx); + } + return txs; +} describe('TransactionStateManager', function () { let txStateManager; const currentNetworkId = KOVAN_NETWORK_ID; @@ -540,19 +575,13 @@ describe('TransactionStateManager', function () { it('cuts off early txs beyond a limit', function () { const limit = txStateManager.txHistoryLimit; - for (let i = 0; i < limit + 1; i++) { - const tx = { - id: i, - time: new Date(), - status: TRANSACTION_STATUSES.CONFIRMED, - metamaskNetworkId: currentNetworkId, - txParams: { - to: VALID_ADDRESS, - from: VALID_ADDRESS, - }, - }; - txStateManager.addTransaction(tx); - } + const txs = generateTransactions(limit + 1, { + chainId: currentChainId, + to: VALID_ADDRESS, + from: VALID_ADDRESS_TWO, + status: TRANSACTION_STATUSES.CONFIRMED, + }); + txs.forEach((tx) => txStateManager.addTransaction(tx)); const result = txStateManager.getTransactions(); assert.equal(result.length, limit, `limit of ${limit} txs enforced`); assert.equal(result[0].id, 1, 'early txs truncated'); @@ -560,52 +589,42 @@ describe('TransactionStateManager', function () { it('cuts off early txs beyond a limit whether or not it is confirmed or rejected', function () { const limit = txStateManager.txHistoryLimit; - for (let i = 0; i < limit + 1; i++) { - const tx = { - id: i, - time: new Date(), - status: TRANSACTION_STATUSES.REJECTED, - metamaskNetworkId: currentNetworkId, - txParams: { - to: VALID_ADDRESS, - from: VALID_ADDRESS, - }, - }; - txStateManager.addTransaction(tx); - } + const txs = generateTransactions(limit + 1, { + chainId: currentChainId, + to: VALID_ADDRESS, + from: VALID_ADDRESS_TWO, + status: TRANSACTION_STATUSES.REJECTED, + }); + txs.forEach((tx) => txStateManager.addTransaction(tx)); const result = txStateManager.getTransactions(); assert.equal(result.length, limit, `limit of ${limit} txs enforced`); assert.equal(result[0].id, 1, 'early txs truncated'); }); it('cuts off early txs beyond a limit but does not cut unapproved txs', function () { - const unconfirmedTx = { - id: 0, - time: new Date(), - status: TRANSACTION_STATUSES.UNAPPROVED, - metamaskNetworkId: currentNetworkId, - txParams: { - to: VALID_ADDRESS, - from: VALID_ADDRESS, - }, - }; - txStateManager.addTransaction(unconfirmedTx); const limit = txStateManager.txHistoryLimit; - for (let i = 1; i < limit + 1; i++) { - const tx = { - id: i, - time: new Date(), - status: TRANSACTION_STATUSES.CONFIRMED, - metamaskNetworkId: currentNetworkId, - txParams: { - to: VALID_ADDRESS, - from: VALID_ADDRESS, - }, - }; - txStateManager.addTransaction(tx); - } + const txs = generateTransactions( + // we add two transactions over limit here to first insert the must be always present + // unapproved tx, then another to force the original logic of adding + // one more beyond the first additional. + limit + 2, + { + chainId: currentChainId, + to: VALID_ADDRESS, + from: VALID_ADDRESS_TWO, + status: (i) => + i === 0 + ? 
TRANSACTION_STATUSES.UNAPPROVED + : TRANSACTION_STATUSES.CONFIRMED, + }, + ); + txs.forEach((tx) => txStateManager.addTransaction(tx)); const result = txStateManager.getTransactions(); - assert.equal(result.length, limit, `limit of ${limit} txs enforced`); + assert.equal( + result.length, + limit + 1, + `limit of ${limit} + 1 for the unapproved tx is enforced`, + ); assert.equal(result[0].id, 0, 'first tx should still be there'); assert.equal( result[0].status, @@ -614,6 +633,118 @@ describe('TransactionStateManager', function () { ); assert.equal(result[1].id, 2, 'early txs truncated'); }); + + it('cuts off entire groups of transactions by nonce when adding new transaction', function () { + const limit = txStateManager.txHistoryLimit; + // In this test case the earliest two transactions are a dropped attempted ether send and a + // following cancel transaction with the same nonce. these two transactions should be dropped + // together as soon as the 11th unique nonce is attempted to be added. We use limit + 2 to + // first get into the state where we are over the "limit" of transactions because of a set + // of transactions with a unique nonce/network combo, then add an additional new transaction + // to trigger the removal of one group of nonces. + const txs = generateTransactions(limit + 2, { + chainId: currentChainId, + to: VALID_ADDRESS, + from: VALID_ADDRESS_TWO, + nonce: (i) => (i === 1 ? `0` : `${i}`), + status: (i) => + i === 0 + ? TRANSACTION_STATUSES.DROPPED + : TRANSACTION_STATUSES.CONFIRMED, + type: (i) => + i === 1 ? TRANSACTION_TYPES.CANCEL : TRANSACTION_STATUSES.SENT_ETHER, + }); + txs.forEach((tx) => txStateManager.addTransaction(tx)); + const result = txStateManager.getTransactions(); + assert.equal(result.length, limit, `limit of ${limit} is enforced`); + assert.notEqual(result[0].id, 0, 'first tx should be removed'); + assert.equal( + result.some( + (tx) => + tx.status === TRANSACTION_STATUSES.DROPPED || + tx.status === TRANSACTION_TYPES.CANCEL, + ), + false, + 'the cancel and dropped transactions should not be present in the result', + ); + }); + + it('cuts off entire groups of transactions by nonce + network when adding new transaction', function () { + const limit = txStateManager.txHistoryLimit; + // In this test case the earliest two transactions are a dropped attempted ether send and a + // following cancel transaction with the same nonce. Then, a bit later the same scenario on a + // different network. The first two transactions should be dropped after adding even another + // single transaction but the other shouldn't be dropped until adding the fifth additional + // transaction + const txs = generateTransactions(limit + 5, { + chainId: (i) => { + if (i === 0 || i === 1) return MAINNET_CHAIN_ID; + else if (i === 4 || i === 5) return RINKEBY_CHAIN_ID; + return currentChainId; + }, + to: VALID_ADDRESS, + from: VALID_ADDRESS_TWO, + nonce: (i) => ([0, 1, 4, 5].includes(i) ? '0' : `${i}`), + status: (i) => + i === 0 || i === 4 + ? TRANSACTION_STATUSES.DROPPED + : TRANSACTION_STATUSES.CONFIRMED, + type: (i) => + i === 1 || i === 5 + ? 
TRANSACTION_TYPES.CANCEL + : TRANSACTION_STATUSES.SENT_ETHER, + }); + txs.forEach((tx) => txStateManager.addTransaction(tx)); + const result = txStateManager.getTransactions({ + filterToCurrentNetwork: false, + }); + + assert.equal( + result.length, + limit + 1, + `limit of ${limit} + 1 for the grouped transactions is enforced`, + ); + // The first group of transactions on mainnet should be removed + assert.equal( + result.some( + (tx) => + tx.chainId === MAINNET_CHAIN_ID && tx.txParams.nonce === '0x0', + ), + false, + 'the mainnet transactions with nonce 0x0 should not be present in the result', + ); + }); + + it('does not cut off entire groups of transactions when adding new transaction when under limit', function () { + // In this test case the earliest two transactions are a dropped attempted ether send and a + // following cancel transaction with the same nonce. Then, a bit later the same scenario on a + // different network. None of these should be dropped because we haven't yet reached the limit + const limit = txStateManager.txHistoryLimit; + const txs = generateTransactions(limit - 1, { + chainId: (i) => ([0, 1, 4, 5].includes(i) ? currentChainId : '0x1'), + to: VALID_ADDRESS, + from: VALID_ADDRESS_TWO, + nonce: (i) => { + if (i === 1) return '0'; + else if (i === 5) return '4'; + return `${i}`; + }, + status: (i) => + i === 0 || i === 4 + ? TRANSACTION_STATUSES.DROPPED + : TRANSACTION_STATUSES.CONFIRMED, + type: (i) => + i === 1 || i === 5 + ? TRANSACTION_TYPES.CANCEL + : TRANSACTION_STATUSES.SENT_ETHER, + }); + txs.forEach((tx) => txStateManager.addTransaction(tx)); + const result = txStateManager.getTransactions({ + filterToCurrentNetwork: false, + }); + assert.equal(result.length, 9, `all nine transactions should be present`); + assert.equal(result[0].id, 0, 'first tx should be present'); + }); }); describe('#updateTransaction', function () { diff --git a/app/scripts/lib/typed-message-manager.js b/app/scripts/lib/typed-message-manager.js index 28e7a2534..df0488df8 100644 --- a/app/scripts/lib/typed-message-manager.js +++ b/app/scripts/lib/typed-message-manager.js @@ -3,12 +3,12 @@ import assert from 'assert'; import { ObservableStore } from '@metamask/obs-store'; import { ethErrors } from 'eth-rpc-errors'; import { typedSignatureHash, TYPED_MESSAGE_SCHEMA } from 'eth-sig-util'; -import { isValidAddress } from 'ethereumjs-util'; import log from 'loglevel'; import jsonschema from 'jsonschema'; import { MESSAGE_TYPE } from '../../../shared/constants/app'; import { METAMASK_CONTROLLER_EVENTS } from '../metamask-controller'; import createId from '../../../shared/modules/random-id'; +import { isValidHexAddress } from '../../../shared/modules/hexstring-utils'; /** * Represents, and contains data about, an 'eth_signTypedData' type signature request. 
These are created when a @@ -160,7 +160,8 @@ export default class TypedMessageManager extends EventEmitter { assert.ok('data' in params, 'Params must include a "data" field.'); assert.ok('from' in params, 'Params must include a "from" field.'); assert.ok( - typeof params.from === 'string' && isValidAddress(params.from), + typeof params.from === 'string' && + isValidHexAddress(params.from, { allowNonPrefixed: false }), '"from" field must be a valid, lowercase, hexadecimal Ethereum address string.', ); diff --git a/app/scripts/metamask-controller.js b/app/scripts/metamask-controller.js index 00b3115c8..93faa0b48 100644 --- a/app/scripts/metamask-controller.js +++ b/app/scripts/metamask-controller.js @@ -10,7 +10,7 @@ import createSubscriptionManager from 'eth-json-rpc-filters/subscriptionManager' import providerAsMiddleware from 'eth-json-rpc-middleware/providerAsMiddleware'; import KeyringController from 'eth-keyring-controller'; import { Mutex } from 'await-semaphore'; -import { toChecksumAddress, stripHexPrefix } from 'ethereumjs-util'; +import { stripHexPrefix } from 'ethereumjs-util'; import log from 'loglevel'; import TrezorKeyring from 'eth-trezor-keyring'; import LedgerBridgeKeyring from '@metamask/eth-ledger-bridge-keyring'; @@ -27,6 +27,7 @@ import { import { TRANSACTION_STATUSES } from '../../shared/constants/transaction'; import { MAINNET_CHAIN_ID } from '../../shared/constants/network'; import { UI_NOTIFICATIONS } from '../../shared/notifications'; +import { toChecksumHexAddress } from '../../shared/modules/hexstring-utils'; import ComposableObservableStore from './lib/ComposableObservableStore'; import AccountTracker from './lib/account-tracker'; @@ -1107,14 +1108,14 @@ export default class MetamaskController extends EventEmitter { // Filter ERC20 tokens const filteredAccountTokens = {}; Object.keys(accountTokens).forEach((address) => { - const checksummedAddress = toChecksumAddress(address); + const checksummedAddress = toChecksumHexAddress(address); filteredAccountTokens[checksummedAddress] = {}; Object.keys(accountTokens[address]).forEach((chainId) => { filteredAccountTokens[checksummedAddress][chainId] = chainId === MAINNET_CHAIN_ID ? accountTokens[address][chainId].filter( ({ address: tokenAddress }) => { - const checksumAddress = toChecksumAddress(tokenAddress); + const checksumAddress = toChecksumHexAddress(tokenAddress); return contractMap[checksumAddress] ? 
contractMap[checksumAddress].erc20 : true; @@ -1151,10 +1152,10 @@ export default class MetamaskController extends EventEmitter { const accounts = { hd: hdAccounts .filter((item, pos) => hdAccounts.indexOf(item) === pos) - .map((address) => toChecksumAddress(address)), + .map((address) => toChecksumHexAddress(address)), simpleKeyPair: simpleKeyPairAccounts .filter((item, pos) => simpleKeyPairAccounts.indexOf(item) === pos) - .map((address) => toChecksumAddress(address)), + .map((address) => toChecksumHexAddress(address)), ledger: [], trezor: [], }; @@ -1164,7 +1165,7 @@ export default class MetamaskController extends EventEmitter { let { transactions } = this.txController.store.getState(); // delete tx for other accounts that we're not importing transactions = Object.values(transactions).filter((tx) => { - const checksummedTxFrom = toChecksumAddress(tx.txParams.from); + const checksummedTxFrom = toChecksumHexAddress(tx.txParams.from); return accounts.hd.includes(checksummedTxFrom); }); diff --git a/app/scripts/migrations/039.js b/app/scripts/migrations/039.js index 7dcf904de..aa288553a 100644 --- a/app/scripts/migrations/039.js +++ b/app/scripts/migrations/039.js @@ -1,5 +1,5 @@ import { cloneDeep } from 'lodash'; -import { toChecksumAddress } from 'ethereumjs-util'; +import { toChecksumHexAddress } from '../../../shared/modules/hexstring-utils'; const version = 39; @@ -12,7 +12,7 @@ function isOldDai(token = {}) { token && typeof token === 'object' && token.symbol === DAI_V1_TOKEN_SYMBOL && - toChecksumAddress(token.address) === DAI_V1_CONTRACT_ADDRESS + toChecksumHexAddress(token.address) === DAI_V1_CONTRACT_ADDRESS ); } diff --git a/app/scripts/migrations/059.js b/app/scripts/migrations/059.js new file mode 100644 index 000000000..bbec1b9b8 --- /dev/null +++ b/app/scripts/migrations/059.js @@ -0,0 +1,52 @@ +import { + cloneDeep, + concat, + groupBy, + keyBy, + pickBy, + isPlainObject, +} from 'lodash'; +import { TRANSACTION_TYPES } from '../../../shared/constants/transaction'; + +const version = 59; + +/** + * Removes orphaned cancel and retry transactions that no longer have the + * original transaction in state, which results in bugs. + */ +export default { + version, + async migrate(originalVersionedData) { + const versionedData = cloneDeep(originalVersionedData); + versionedData.meta.version = version; + const state = versionedData.data; + versionedData.data = transformState(state); + return versionedData; + }, +}; + +function transformState(state) { + const transactions = state?.TransactionController?.transactions; + if (isPlainObject(transactions)) { + const nonceNetworkGroupedObject = groupBy( + Object.values(transactions), + (tx) => { + return `${tx.txParams?.nonce}-${tx.chainId ?? 
tx.metamaskNetworkId}`; + }, + ); + + const withoutOrphans = pickBy(nonceNetworkGroupedObject, (group) => { + return group.some( + (tx) => + tx.type !== TRANSACTION_TYPES.CANCEL && + tx.type !== TRANSACTION_TYPES.RETRY, + ); + }); + state.TransactionController.transactions = keyBy( + concat(...Object.values(withoutOrphans)), + (tx) => tx.id, + ); + } + + return state; +} diff --git a/app/scripts/migrations/059.test.js b/app/scripts/migrations/059.test.js new file mode 100644 index 000000000..bdf4263c2 --- /dev/null +++ b/app/scripts/migrations/059.test.js @@ -0,0 +1,385 @@ +import { strict as assert } from 'assert'; +import { cloneDeep } from 'lodash'; +import { + KOVAN_CHAIN_ID, + MAINNET_CHAIN_ID, + RINKEBY_CHAIN_ID, + GOERLI_CHAIN_ID, +} from '../../../shared/constants/network'; +import { + TRANSACTION_TYPES, + TRANSACTION_STATUSES, +} from '../../../shared/constants/transaction'; +import migration59 from './059'; + +const ERRONEOUS_TRANSACTION_STATE = { + 0: { + type: TRANSACTION_TYPES.CANCEL, + id: 0, + chainId: MAINNET_CHAIN_ID, + txParams: { + nonce: '0x0', + }, + }, + 1: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 1, + chainId: MAINNET_CHAIN_ID, + txParams: { + nonce: '0x1', + }, + }, + 2: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 2, + chainId: KOVAN_CHAIN_ID, + txParams: { + nonce: '0x2', + }, + }, + 3: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 3, + chainId: RINKEBY_CHAIN_ID, + txParams: { + nonce: '0x3', + }, + }, + 4: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 4, + chainId: RINKEBY_CHAIN_ID, + txParams: { + nonce: '0x4', + }, + }, + 5: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 5, + chainId: MAINNET_CHAIN_ID, + txParams: { + nonce: '0x5', + }, + }, + 6: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 6, + chainId: KOVAN_CHAIN_ID, + txParams: { + nonce: '0x6', + }, + }, + 7: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 7, + chainId: RINKEBY_CHAIN_ID, + txParams: { + nonce: '0x7', + }, + }, + 8: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 8, + chainId: RINKEBY_CHAIN_ID, + txParams: { + nonce: '0x8', + }, + }, + 9: { + type: TRANSACTION_TYPES.SENT_ETHER, + id: 9, + chainId: RINKEBY_CHAIN_ID, + status: TRANSACTION_STATUSES.UNAPPROVED, + }, +}; + +const ERRONEOUS_TRANSACTION_STATE_RETRY = { + ...ERRONEOUS_TRANSACTION_STATE, + 0: { + ...ERRONEOUS_TRANSACTION_STATE[0], + type: TRANSACTION_TYPES.RETRY, + }, +}; + +const ERRONEOUS_TRANSACTION_STATE_MIXED = { + ...ERRONEOUS_TRANSACTION_STATE, + 10: { + type: TRANSACTION_TYPES.RETRY, + id: 10, + chainId: MAINNET_CHAIN_ID, + txParams: { + nonce: '0xa', + }, + }, + 11: { + type: TRANSACTION_TYPES.RETRY, + id: 11, + chainId: MAINNET_CHAIN_ID, + txParams: { + nonce: '0xb', + }, + }, +}; + +describe('migration #59', function () { + it('should update the version metadata', async function () { + const oldStorage = { + meta: { + version: 58, + }, + data: {}, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(newStorage.meta, { + version: 59, + }); + }); + + it('should drop orphaned cancel transactions', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: ERRONEOUS_TRANSACTION_STATE, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + const EXPECTED = cloneDeep(ERRONEOUS_TRANSACTION_STATE); + delete EXPECTED['0']; + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: EXPECTED, + }, + foo: 'bar', + }); + }); + + it('should drop orphaned cancel transactions even if a nonce 
exists on another network that is confirmed', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: { + ...ERRONEOUS_TRANSACTION_STATE, + 11: { + ...ERRONEOUS_TRANSACTION_STATE['0'], + id: 11, + chainId: GOERLI_CHAIN_ID, + type: TRANSACTION_TYPES.SENT_ETHER, + }, + }, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + const EXPECTED = cloneDeep( + oldStorage.data.TransactionController.transactions, + ); + delete EXPECTED['0']; + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: EXPECTED, + }, + foo: 'bar', + }); + }); + + it('should not drop cancel transactions with matching non cancel or retry in same network and nonce', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: { + ...ERRONEOUS_TRANSACTION_STATE, + 11: { + ...ERRONEOUS_TRANSACTION_STATE['0'], + id: 11, + type: TRANSACTION_TYPES.SENT_ETHER, + }, + }, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: oldStorage.data.TransactionController.transactions, + }, + foo: 'bar', + }); + }); + + it('should drop orphaned retry transactions', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: ERRONEOUS_TRANSACTION_STATE_RETRY, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + const EXPECTED = cloneDeep(ERRONEOUS_TRANSACTION_STATE_RETRY); + delete EXPECTED['0']; + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: EXPECTED, + }, + foo: 'bar', + }); + }); + + it('should drop orphaned retry transactions even if a nonce exists on another network that is confirmed', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: { + ...ERRONEOUS_TRANSACTION_STATE_RETRY, + 11: { + ...ERRONEOUS_TRANSACTION_STATE_RETRY['0'], + id: 11, + chainId: GOERLI_CHAIN_ID, + type: TRANSACTION_TYPES.SENT_ETHER, + }, + }, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + const EXPECTED = cloneDeep( + oldStorage.data.TransactionController.transactions, + ); + delete EXPECTED['0']; + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: EXPECTED, + }, + foo: 'bar', + }); + }); + + it('should not drop retry transactions with matching non cancel or retry in same network and nonce', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: { + ...ERRONEOUS_TRANSACTION_STATE_RETRY, + 11: { + ...ERRONEOUS_TRANSACTION_STATE_RETRY['0'], + id: 11, + type: TRANSACTION_TYPES.SENT_ETHER, + }, + }, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: oldStorage.data.TransactionController.transactions, + }, + foo: 'bar', + }); + }); + + it('should drop all orphaned retry and cancel transactions', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: ERRONEOUS_TRANSACTION_STATE_MIXED, + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + // The following ERRONEOUS_TRANSACTION_STATE object only has one orphan in it + // so using it as the base for our expected output automatically removes 
a few + // transactions we expect to be missing. + const EXPECTED = cloneDeep(ERRONEOUS_TRANSACTION_STATE); + delete EXPECTED['0']; + assert.deepEqual(newStorage.data, { + TransactionController: { + transactions: EXPECTED, + }, + foo: 'bar', + }); + }); + + it('should do nothing if transactions state does not exist', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + bar: 'baz', + }, + IncomingTransactionsController: { + foo: 'baz', + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(oldStorage.data, newStorage.data); + }); + + it('should do nothing if transactions state is empty', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: {}, + bar: 'baz', + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(oldStorage.data, newStorage.data); + }); + + it('should do nothing if transactions state is not an object', async function () { + const oldStorage = { + meta: {}, + data: { + TransactionController: { + transactions: [], + bar: 'baz', + }, + foo: 'bar', + }, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(oldStorage.data, newStorage.data); + }); + + it('should do nothing if state is empty', async function () { + const oldStorage = { + meta: {}, + data: {}, + }; + + const newStorage = await migration59.migrate(oldStorage); + assert.deepEqual(oldStorage.data, newStorage.data); + }); +}); diff --git a/app/scripts/migrations/index.js b/app/scripts/migrations/index.js index b0c1716f5..47925fbba 100644 --- a/app/scripts/migrations/index.js +++ b/app/scripts/migrations/index.js @@ -63,6 +63,7 @@ const migrations = [ require('./056').default, require('./057').default, require('./058').default, + require('./059').default, ]; export default migrations; diff --git a/shared/constants/transaction.js b/shared/constants/transaction.js index 44a777a94..5eedd6086 100644 --- a/shared/constants/transaction.js +++ b/shared/constants/transaction.js @@ -1,3 +1,5 @@ +import { MESSAGE_TYPE } from './app'; + /** * Transaction Type is a MetaMask construct used internally * @typedef {Object} TransactionTypes @@ -51,6 +53,11 @@ export const TRANSACTION_TYPES = { DEPLOY_CONTRACT: 'contractDeployment', SWAP: 'swap', SWAP_APPROVAL: 'swapApproval', + SIGN: MESSAGE_TYPE.ETH_SIGN, + SIGN_TYPED_DATA: MESSAGE_TYPE.ETH_SIGN_TYPED_DATA, + PERSONAL_SIGN: MESSAGE_TYPE.PERSONAL_SIGN, + ETH_DECRYPT: MESSAGE_TYPE.ETH_DECRYPT, + ETH_GET_ENCRYPTION_PUBLIC_KEY: MESSAGE_TYPE.ETH_GET_ENCRYPTION_PUBLIC_KEY, }; /** diff --git a/shared/modules/hexstring-utils.js b/shared/modules/hexstring-utils.js new file mode 100644 index 000000000..1f895a3c7 --- /dev/null +++ b/shared/modules/hexstring-utils.js @@ -0,0 +1,73 @@ +import { + isHexString, + isValidAddress, + isValidChecksumAddress, + addHexPrefix, + toChecksumAddress, +} from 'ethereumjs-util'; + +export const BURN_ADDRESS = '0x0000000000000000000000000000000000000000'; + +export function isBurnAddress(address) { + return address === BURN_ADDRESS; +} + +/** + * Validates that the input is a hex address. This utility method is a thin + * wrapper around ethereumjs-util.isValidAddress, with the exception that it + * does not throw an error when provided values that are not hex strings. 
In + * addition, and by default, this method will return true for hex strings that + * meet the length requirement of a hex address, but are not prefixed with `0x` + * Finally, if the mixedCaseUseChecksum flag is true and a mixed case string is + * provided this method will validate it has the proper checksum formatting. + * @param {string} possibleAddress - Input parameter to check against + * @param {Object} [options] - options bag + * @param {boolean} [options.allowNonPrefixed] - If true will first ensure '0x' + * is prepended to the string + * @param {boolean} [options.mixedCaseUseChecksum] - If true will treat mixed + * case addresses as checksum addresses and validate that proper checksum + * format is used + * @returns {boolean} whether or not the input is a valid hex address + */ +export function isValidHexAddress( + possibleAddress, + { allowNonPrefixed = true, mixedCaseUseChecksum = false } = {}, +) { + const addressToCheck = allowNonPrefixed + ? addHexPrefix(possibleAddress) + : possibleAddress; + if (!isHexString(addressToCheck)) { + return false; + } + + if (mixedCaseUseChecksum) { + const prefixRemoved = addressToCheck.slice(2); + const lower = prefixRemoved.toLowerCase(); + const upper = prefixRemoved.toUpperCase(); + const allOneCase = prefixRemoved === lower || prefixRemoved === upper; + if (!allOneCase) { + return isValidChecksumAddress(addressToCheck); + } + } + + return isValidAddress(addressToCheck); +} + +export function toChecksumHexAddress(address) { + if (!address) { + // our internal checksumAddress function that this method replaces would + // return an empty string for nullish input. If any direct usages of + // ethereumjs-util.toChecksumAddress were called with nullish input it + // would have resulted in an error on version 5.1. + return ''; + } + const hexPrefixed = addHexPrefix(address); + if (!isHexString(hexPrefixed)) { + // Version 5.1 of ethereumjs-utils would have returned '0xY' for input 'y' + // but we shouldn't waste effort trying to change case on a clearly invalid + // string. Instead just return the hex prefixed original string which most + // closely mimics the original behavior. 
+ return hexPrefixed; + } + return toChecksumAddress(addHexPrefix(address)); +} diff --git a/shared/modules/hexstring-utils.test.js b/shared/modules/hexstring-utils.test.js new file mode 100644 index 000000000..3d292451e --- /dev/null +++ b/shared/modules/hexstring-utils.test.js @@ -0,0 +1,57 @@ +import { strict as assert } from 'assert'; +import { toChecksumAddress } from 'ethereumjs-util'; +import { isValidHexAddress } from './hexstring-utils'; + +describe('hexstring utils', function () { + describe('isValidHexAddress', function () { + it('should allow 40-char non-prefixed hex', function () { + const address = 'fdea65c8e26263f6d9a1b5de9555d2931a33b825'; + const result = isValidHexAddress(address); + assert.equal(result, true); + }); + + it('should allow 42-char prefixed hex', function () { + const address = '0xfdea65c8e26263f6d9a1b5de9555d2931a33b825'; + const result = isValidHexAddress(address); + assert.equal(result, true); + }); + + it('should NOT allow 40-char non-prefixed hex when allowNonPrefixed is false', function () { + const address = 'fdea65c8e26263f6d9a1b5de9555d2931a33b825'; + const result = isValidHexAddress(address, { allowNonPrefixed: false }); + assert.equal(result, false); + }); + + it('should NOT allow any length of non hex-prefixed string', function () { + const address = 'fdea65c8e26263f6d9a1b5de9555d2931a33b85'; + const result = isValidHexAddress(address); + assert.equal(result, false); + }); + + it('should NOT allow less than 42 character hex-prefixed string', function () { + const address = '0xfdea65ce26263f6d9a1b5de9555d2931a33b85'; + const result = isValidHexAddress(address); + assert.equal(result, false); + }); + + it('should recognize correct capitalized checksum', function () { + const address = '0xFDEa65C8e26263F6d9A1B5de9555D2931A33b825'; + const result = isValidHexAddress(address, { mixedCaseUseChecksum: true }); + assert.equal(result, true); + }); + + it('should recognize incorrect capitalized checksum', function () { + const address = '0xFDea65C8e26263F6d9A1B5de9555D2931A33b825'; + const result = isValidHexAddress(address, { mixedCaseUseChecksum: true }); + assert.equal(result, false); + }); + + it('should recognize this sample hashed address', function () { + const address = '0x5Fda30Bb72B8Dfe20e48A00dFc108d0915BE9Bb0'; + const result = isValidHexAddress(address, { mixedCaseUseChecksum: true }); + const hashed = toChecksumAddress(address.toLowerCase()); + assert.equal(hashed, address); + assert.equal(result, true); + }); + }); +}); diff --git a/ui/app/components/app/account-list-item/account-list-item-component.test.js b/ui/app/components/app/account-list-item/account-list-item-component.test.js index 11d5ba6eb..03c038cf4 100644 --- a/ui/app/components/app/account-list-item/account-list-item-component.test.js +++ b/ui/app/components/app/account-list-item/account-list-item-component.test.js @@ -1,18 +1,19 @@ import React from 'react'; import { shallow } from 'enzyme'; import sinon from 'sinon'; -import * as utils from '../../../helpers/utils/util'; import Identicon from '../../ui/identicon'; +import { toChecksumHexAddress } from '../../../../../shared/modules/hexstring-utils'; import AccountListItem from './account-list-item'; +jest.mock('../../../../../shared/modules/hexstring-utils', () => ({ + toChecksumHexAddress: jest.fn(() => 'mockCheckSumAddress'), +})); + describe('AccountListItem Component', () => { - let wrapper, propsMethodSpies, checksumAddressStub; + let wrapper, propsMethodSpies; describe('render', () => { beforeAll(() => { - 
checksumAddressStub = sinon - .stub(utils, 'checksumAddress') - .returns('mockCheckSumAddress'); propsMethodSpies = { handleClick: sinon.spy(), }; @@ -36,7 +37,6 @@ describe('AccountListItem Component', () => { afterEach(() => { propsMethodSpies.handleClick.resetHistory(); - checksumAddressStub.resetHistory(); }); afterAll(() => { @@ -126,9 +126,7 @@ describe('AccountListItem Component', () => { expect( wrapper.find('.account-list-item__account-address').text(), ).toStrictEqual('mockCheckSumAddress'); - expect(checksumAddressStub.getCall(0).args).toStrictEqual([ - 'mockAddress', - ]); + expect(toChecksumHexAddress).toHaveBeenCalledWith('mockAddress'); }); it('should not render the account address as a checksumAddress if displayAddress is false', () => { diff --git a/ui/app/components/app/account-list-item/account-list-item.js b/ui/app/components/app/account-list-item/account-list-item.js index 9b0c0e38f..fd1b80bc4 100644 --- a/ui/app/components/app/account-list-item/account-list-item.js +++ b/ui/app/components/app/account-list-item/account-list-item.js @@ -1,8 +1,8 @@ import React from 'react'; import PropTypes from 'prop-types'; -import { checksumAddress } from '../../../helpers/utils/util'; import Identicon from '../../ui/identicon'; import AccountMismatchWarning from '../../ui/account-mismatch-warning/account-mismatch-warning.component'; +import { toChecksumHexAddress } from '../../../../../shared/modules/hexstring-utils'; export default function AccountListItem({ account, @@ -34,7 +34,7 @@ export default function AccountListItem({ {displayAddress && name && (