Commit 1b6880d0 authored by Mark Tyneway, committed by GitHub

Merge pull request #1786 from ethereum-optimism/develop

Develop -> Master PR
parents 807f62cc dc1eb771
---
'@eth-optimism/contracts': patch
---
Use a gas price of zero for static calls in the deploy process
---
'@eth-optimism/contracts': patch
---
Adds additional deploy step to transfer messenger ownership
---
'@eth-optimism/l2geth': patch
---
Fixes a deadlock in the sync service: when the miner failed to commit a transaction, no new chain head event was produced and `applyTransactionToTip` blocked forever. Errors are now propagated back over a channel on `NewTxsEvent`.
@@ -22,7 +22,10 @@ import (
)

// NewTxsEvent is posted when a batch of transactions enter the transaction pool.
-type NewTxsEvent struct{ Txs []*types.Transaction }
type NewTxsEvent struct {
  Txs   []*types.Transaction
  ErrCh chan error
}

// NewMinedBlockEvent is posted when a block has been imported.
type NewMinedBlockEvent struct{ Block *types.Block }
...
@@ -1090,7 +1090,7 @@ func (pool *TxPool) runReorg(done chan struct{}, reset *txpoolResetRequest, dirt
    for _, set := range events {
      txs = append(txs, set.Flatten()...)
    }
-    pool.txFeed.Send(NewTxsEvent{txs})
    pool.txFeed.Send(NewTxsEvent{Txs: txs})
  }
}
...
@@ -506,7 +506,10 @@ func (w *worker) mainLoop() {
        }
        w.pendingMu.Unlock()
      } else {
-        log.Debug("Problem committing transaction", "msg", err)
        log.Error("Problem committing transaction", "msg", err)
        if ev.ErrCh != nil {
          ev.ErrCh <- err
        }
      }
    case ev := <-w.txsCh:
@@ -781,6 +784,11 @@ func (w *worker) commitTransactions(txs *types.TransactionsByPriceAndNonce, coin
  }

  var coalescedLogs []*types.Log
  // UsingOVM
  // Keep track of the number of transactions being added to the block.
  // Blocks should only have a single transaction. This value is used to
  // compute a success return value.
  var txCount int

  for {
    // In the following three cases, we will interrupt the execution of the transaction.
@@ -814,6 +822,8 @@ func (w *worker) commitTransactions(txs *types.TransactionsByPriceAndNonce, coin
      break
    }
    txCount++

    // Error may be ignored here. The error has already been checked
    // during transaction acceptance is the transaction pool.
    //
@@ -881,7 +891,7 @@ func (w *worker) commitTransactions(txs *types.TransactionsByPriceAndNonce, coin
  if interrupt != nil {
    w.resubmitAdjustCh <- &intervalAdjust{inc: false}
  }
-  return false
  return txCount == 0
}

// commitNewTx is an OVM addition that mines a block with a single tx in it.
...
@@ -806,9 +806,9 @@ func (s *SyncService) applyTransactionToTip(tx *types.Transaction) error {
  // Note that Ethereum Layer one consensus rules dictate that the timestamp
  // must be strictly increasing between blocks, so no need to check both the
  // timestamp and the blocknumber.
  ts := s.GetLatestL1Timestamp()
  bn := s.GetLatestL1BlockNumber()
  if tx.L1Timestamp() == 0 {
-    ts := s.GetLatestL1Timestamp()
-    bn := s.GetLatestL1BlockNumber()
    tx.SetL1Timestamp(ts)
    tx.SetL1BlockNumber(bn)
  } else if tx.L1Timestamp() > s.GetLatestL1Timestamp() {
@@ -816,17 +816,15 @@ func (s *SyncService) applyTransactionToTip(tx *types.Transaction) error {
    // service's locally maintained timestamp, update the timestamp and
    // blocknumber to equal that of the transaction's. This should happen
    // with `enqueue` transactions.
-    ts := tx.L1Timestamp()
-    bn := tx.L1BlockNumber()
-    s.SetLatestL1Timestamp(ts)
-    s.SetLatestL1BlockNumber(bn.Uint64())
-    log.Debug("Updating OVM context based on new transaction", "timestamp", ts, "blocknumber", bn.Uint64(), "queue-origin", tx.QueueOrigin())
    s.SetLatestL1Timestamp(tx.L1Timestamp())
    s.SetLatestL1BlockNumber(tx.L1BlockNumber().Uint64())
    log.Debug("Updating OVM context based on new transaction", "timestamp", ts, "blocknumber", tx.L1BlockNumber().Uint64(), "queue-origin", tx.QueueOrigin())
  } else if tx.L1Timestamp() < s.GetLatestL1Timestamp() {
    log.Error("Timestamp monotonicity violation", "hash", tx.Hash().Hex())
  }

  index := s.GetLatestIndex()
  if tx.GetMeta().Index == nil {
-    index := s.GetLatestIndex()
    if index == nil {
      tx.SetIndex(0)
    } else {
@@ -846,21 +844,36 @@ func (s *SyncService) applyTransactionToTip(tx *types.Transaction) error {
  log.Debug("Applying transaction to tip", "index", *tx.GetMeta().Index, "hash", tx.Hash().Hex(), "origin", tx.QueueOrigin().String())

  txs := types.Transactions{tx}
-  s.txFeed.Send(core.NewTxsEvent{Txs: txs})
  errCh := make(chan error, 1)
  s.txFeed.Send(core.NewTxsEvent{
    Txs:   txs,
    ErrCh: errCh,
  })
  // Block until the transaction has been added to the chain
  log.Trace("Waiting for transaction to be added to chain", "hash", tx.Hash().Hex())
-  <-s.chainHeadCh

  select {
  case err := <-errCh:
    log.Error("Got error waiting for transaction to be added to chain", "msg", err)
    s.SetLatestL1Timestamp(ts)
    s.SetLatestL1BlockNumber(bn)
    s.SetLatestIndex(index)
    return err
  case <-s.chainHeadCh:
    // Update the cache when the transaction is from the owner
    // of the gas price oracle
    sender, _ := types.Sender(s.signer, tx)
    owner := s.GasPriceOracleOwnerAddress()
    if owner != nil && sender == *owner {
      if err := s.updateGasPriceOracleCache(nil); err != nil {
        s.SetLatestL1Timestamp(ts)
        s.SetLatestL1BlockNumber(bn)
        s.SetLatestIndex(index)
        return err
      }
    }
    return nil
  }
}

// applyBatchedTransaction applies transactions that were batched to layer one.
...
@@ -6,7 +6,7 @@ import {
  fundAccount,
  sendImpersonatedTx,
  BIG_BALANCE,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'
import { awaitCondition } from '@eth-optimism/core-utils'
...
@@ -5,7 +5,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -5,7 +5,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -5,7 +5,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -5,7 +5,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -5,7 +5,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -6,7 +6,7 @@ import { hexStringEquals, awaitCondition } from '@eth-optimism/core-utils'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
@@ -25,7 +25,7 @@ const deployFn: DeployFunction = async (hre) => {
      // a proxy. However, it's best practice to initialize it anyway just in case there's
      // some unknown security hole. It also prevents another user from appearing like an
      // official address because it managed to call the initialization function.
-      console.log(`Initializing L1CrossDomainMessenger...`)
      console.log(`Initializing L1CrossDomainMessenger (implementation)...`)
      await contract.initialize(Lib_AddressManager.address)

      console.log(`Checking that contract was correctly initialized...`)
@@ -39,6 +39,23 @@ const deployFn: DeployFunction = async (hre) => {
        5000,
        100
      )

      // Same thing as above, we want to transfer ownership of this contract to the owner of the
      // AddressManager. Not technically necessary but seems like the right thing to do.
      console.log(
        `Transferring ownership of L1CrossDomainMessenger (implementation)...`
      )
      const owner = (hre as any).deployConfig.ovmAddressManagerOwner
      await contract.transferOwnership(owner)

      console.log(`Checking that contract owner was correctly set...`)
      await awaitCondition(
        async () => {
          return hexStringEquals(await contract.owner(), owner)
        },
        5000,
        100
      )
    },
  })
}
...
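The transfer-then-verify pattern above (call transferOwnership, then poll owner() with awaitCondition until the change is observable) is repeated for the proxy in the finalize step further down. A minimal sketch of how it could be factored into a shared helper is shown here for illustration only; transferAndVerifyOwnership is a hypothetical name and is not something this PR adds.

import { Contract } from 'ethers'
import { hexStringEquals, awaitCondition } from '@eth-optimism/core-utils'

// Illustrative helper (not part of this diff): transfer ownership of an
// Ownable-style contract and wait until the new owner is visible on chain.
export const transferAndVerifyOwnership = async (
  contract: Contract,
  newOwner: string
): Promise<void> => {
  await contract.transferOwnership(newOwner)
  // Poll every 5 seconds, up to 100 attempts, mirroring the deploy scripts above.
  await awaitCondition(
    async () => {
      return hexStringEquals(await contract.owner(), newOwner)
    },
    5000,
    100
  )
}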
@@ -5,7 +5,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -2,7 +2,7 @@
import { DeployFunction } from 'hardhat-deploy/dist/types'

/* Imports: Internal */
-import { deployAndVerifyAndThen } from '../src/hardhat-deploy-ethers'
import { deployAndVerifyAndThen } from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -6,7 +6,7 @@ import { hexStringEquals } from '@eth-optimism/core-utils'
import {
  deployAndVerifyAndThen,
  getContractFromArtifact,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'
import { predeploys } from '../src/predeploys'
...
@@ -3,10 +3,7 @@ import { hexStringEquals, awaitCondition } from '@eth-optimism/core-utils'
import { DeployFunction } from 'hardhat-deploy/dist/types'

/* Imports: Internal */
-import {
-  getContractFromArtifact,
-  isHardhatNode,
-} from '../src/hardhat-deploy-ethers'
import { getContractFromArtifact, isHardhatNode } from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -3,14 +3,12 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import { hexStringEquals, awaitCondition } from '@eth-optimism/core-utils'

/* Imports: Internal */
-import { getContractFromArtifact } from '../src/hardhat-deploy-ethers'
import { getContractFromArtifact } from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
  const { deployer } = await hre.getNamedAccounts()

-  console.log(`Initializing Proxy__L1CrossDomainMessenger...`)
  // There's a risk that we could get front-run during a fresh deployment, which would brick this
  // contract and require that the proxy be re-deployed. We will not have this risk once we move
  // entirely to chugsplash-style deployments. It's unlikely to happen and relatively easy to
@@ -29,6 +27,7 @@ const deployFn: DeployFunction = async (hre) => {
    names.unmanaged.Lib_AddressManager
  )

  console.log(`Initializing Proxy__OVM_L1CrossDomainMessenger...`)
  await Proxy__OVM_L1CrossDomainMessenger.initialize(Lib_AddressManager.address)

  console.log(`Checking that contract was correctly initialized...`)
@@ -42,6 +41,22 @@ const deployFn: DeployFunction = async (hre) => {
    5000,
    100
  )

  console.log(`Setting Proxy__OVM_L1CrossDomainMessenger owner...`)
  const owner = (hre as any).deployConfig.ovmAddressManagerOwner
  await Proxy__OVM_L1CrossDomainMessenger.transferOwnership(owner)

  console.log(`Checking that the contract owner was correctly set...`)
  await awaitCondition(
    async () => {
      return hexStringEquals(
        await Proxy__OVM_L1CrossDomainMessenger.owner(),
        owner
      )
    },
    5000,
    100
  )
}

deployFn.tags = ['finalize']
...
@@ -8,7 +8,7 @@ import { getContractDefinition } from '../src/contract-defs'
import {
  getContractFromArtifact,
  deployAndVerifyAndThen,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -9,7 +9,7 @@ import {
  getContractFromArtifact,
  deployAndVerifyAndThen,
  isHardhatNode,
-} from '../src/hardhat-deploy-ethers'
} from '../src/deploy-utils'
import { names } from '../src/address-names'

const deployFn: DeployFunction = async (hre) => {
...
@@ -3,7 +3,7 @@ import { DeployFunction } from 'hardhat-deploy/dist/types'
import { hexStringEquals, awaitCondition } from '@eth-optimism/core-utils'

/* Imports: Internal */
-import { getContractFromArtifact } from '../src/hardhat-deploy-ethers'
import { getContractFromArtifact } from '../src/deploy-utils'

const deployFn: DeployFunction = async (hre) => {
  const { deployer } = await hre.getNamedAccounts()
...
@@ -5,10 +5,7 @@ import { defaultHardhatNetworkHdAccountsConfigParams } from 'hardhat/internal/co
import { normalizeHardhatNetworkAccountsConfig } from 'hardhat/internal/core/providers/util'

/* Imports: Internal */
-import {
-  getContractFromArtifact,
-  isHardhatNode,
-} from '../src/hardhat-deploy-ethers'
import { getContractFromArtifact, isHardhatNode } from '../src/deploy-utils'
import { names } from '../src/address-names'

// This is a TEMPORARY way to fund the default hardhat accounts on L2. The better way to do this is
...
@@ -103,8 +103,16 @@ export const getAdvancedContract = (opts: {
  for (const fnName of Object.keys(contract.functions)) {
    const fn = contract[fnName].bind(contract)
    ;(contract as any)[fnName] = async (...args: any) => {
      // We want to use the gas price that has been configured at the beginning of the deployment.
      // However, if the function being triggered is a "constant" (static) function, then we don't
      // want to provide a gas price because we're prone to getting insufficient balance errors.
      let gasPrice = opts.hre.deployConfig.gasPrice || undefined
      if (contract.interface.getFunction(fnName).constant) {
        gasPrice = 0
      }
      const tx = await fn(...args, {
-        gasPrice: opts.hre.deployConfig.gasprice || undefined,
        gasPrice,
      })
      if (typeof tx !== 'object' || typeof tx.wait !== 'function') {
...
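For context on the change above: the wrapper forwards overrides to every wrapped call, including view functions executed via eth_call, and a non-zero gas price there can trigger the "insufficient balance" errors the comment mentions. A standalone sketch of the same decision, assuming an ethers v5 Contract; overridesFor is a hypothetical helper, not part of this PR.

import { Contract } from 'ethers'

// Pick call overrides the same way getAdvancedContract now does: static
// (view/pure) functions get a zero gas price, everything else uses the
// gas price configured for the deployment (in wei), if any.
const overridesFor = (
  contract: Contract,
  fnName: string,
  configuredGasPrice?: number
) => {
  // In the ethers v5 ABI fragment, `constant` is true for view/pure functions.
  const isStatic = contract.interface.getFunction(fnName).constant
  return { gasPrice: isStatic ? 0 : configuredGasPrice ?? undefined }
}

// Example usage (hypothetical):
//   const owner = await messenger.owner(overridesFor(messenger, 'owner', gasPrice))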
@@ -34,6 +34,7 @@
    "@discoveryjs/json-ext": "^0.5.3",
    "@eth-optimism/core-utils": "0.7.1",
    "@ethersproject/abstract-provider": "^5.5.1",
    "@ethersproject/abi": "^5.5.0",
    "@ethersproject/bignumber": "^5.5.0",
    "@ethersproject/properties": "^5.5.0",
    "@ethersproject/providers": "^5.5.0",
@@ -56,13 +57,14 @@
    "eslint-plugin-prettier": "^3.4.0",
    "eslint-plugin-react": "^7.24.0",
    "eslint-plugin-unicorn": "^32.0.1",
    "ethereum-waffle": "^3.4.0",
    "ethereumjs-util": "^7.1.3",
    "ethers": "^5.4.5",
    "lint-staged": "11.0.0",
    "mocha": "^9.1.2",
    "node-fetch": "2.6.5",
    "solc": "0.8.7-fixed",
-    "ts-node": "^10.0.0",
-    "ts-mocha": "^8.0.0"
    "ts-mocha": "^8.0.0",
    "ts-node": "^10.0.0"
  }
}
@@ -13,7 +13,7 @@ import {
  DELETE_CONTRACTS,
} from './constants'
import { Account, AccountType, SurgeryDataSources } from './types'
-import { hexStringEqual } from './utils'
import { hexStringEqual, isBytecodeERC20 } from './utils'

export const classifiers: {
  [key in AccountType]: (account: Account, data: SurgeryDataSources) => boolean
@@ -90,6 +90,9 @@ export const classifiers: {
  [AccountType.VERIFIED]: (account, data) => {
    return !classifiers[AccountType.UNVERIFIED](account, data)
  },
  [AccountType.ERC20]: (account) => {
    return isBytecodeERC20(account.code)
  },
}

export const classify = (
...
@@ -434,4 +434,9 @@ export const handlers: {
      code: bytecode,
    }
  },
  [AccountType.ERC20]: async (account) => {
    throw new Error(
      `Unexpected ERC20 classification, this should never happen: ${account.address}`
    )
  },
}
@@ -59,6 +59,7 @@ export enum AccountType {
  UNISWAP_V3_OTHER,
  UNVERIFIED,
  VERIFIED,
  ERC20,
}

export interface UniswapPoolData {
...
/* eslint @typescript-eslint/no-var-requires: "off" */
import { ethers } from 'ethers'
import { abi as UNISWAP_FACTORY_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Factory.sol/UniswapV3Factory.json'
import { Interface } from '@ethersproject/abi'
import { parseChunked } from '@discoveryjs/json-ext'
import { createReadStream } from 'fs'
import * as fs from 'fs'
import byline from 'byline'
import * as dotenv from 'dotenv'
import * as assert from 'assert'
-import { reqenv, getenv } from '@eth-optimism/core-utils'
import { reqenv, getenv, remove0x } from '@eth-optimism/core-utils'
import {
  Account,
  EtherscanContract,
@@ -114,6 +115,48 @@ export const getMappingKey = (keys: any[], slot: number) => {
  return key
}

// ERC20 interface
const iface = new Interface([
  'function balanceOf(address)',
  'function name()',
  'function symbol()',
  'function decimals()',
  'function totalSupply()',
  'function transfer(address,uint256)',
])

// PUSH4 should prefix any 4 byte selector
const PUSH4 = 0x63

const erc20Sighashes = new Set()
// Build the set of erc20 4 byte selectors
for (const fn of Object.keys(iface.functions)) {
  const sighash = iface.getSighash(fn)
  erc20Sighashes.add(sighash)
}

export const isBytecodeERC20 = (bytecode: string): boolean => {
  if (bytecode === '0x' || bytecode === undefined) {
    return false
  }

  const seen = new Set()
  const buf = Buffer.from(remove0x(bytecode), 'hex')
  for (const [i, byte] of buf.entries()) {
    // Track all of the observed 4 byte selectors that follow a PUSH4
    // and are also present in the set of erc20Sighashes
    if (byte === PUSH4) {
      const sighash = '0x' + buf.slice(i + 1, i + 5).toString('hex')
      if (erc20Sighashes.has(sighash)) {
        seen.add(sighash)
      }
    }
  }

  // Create a set that contains those elements of erc20Sighashes
  // that are not in the seen set
  const elements = [...erc20Sighashes].filter((x) => !seen.has(x))
  return !elements.length
}

export const getUniswapV3Factory = (signerOrProvider: any): ethers.Contract => {
  return new ethers.Contract(
    UNISWAP_V3_FACTORY_ADDRESS,
...
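A quick usage sketch for the heuristic above, assuming it runs next to scripts/utils and a JSON-RPC endpoint is reachable; the URL and address below are placeholders.

import { ethers } from 'ethers'
import { isBytecodeERC20 } from './utils'

const provider = new ethers.providers.JsonRpcProvider('http://localhost:8545')
const address = '0x4200000000000000000000000000000000000006' // placeholder address

const main = async () => {
  // Deployed (runtime) bytecode as a 0x-prefixed hex string.
  const code = await provider.getCode(address)
  // True only when every selector from the ERC20 interface appears after a PUSH4.
  console.log(`${address} looks like an ERC20:`, isBytecodeERC20(code))
}

main()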
import { expect } from '@eth-optimism/core-utils/test/setup'
import { BigNumber } from 'ethers'

import { env } from './setup'

describe('erc20', () => {
  describe('standard ERC20', () => {
    before(async () => {
      await env.init()
    })

    it('ERC20s', () => {
      for (const [i, erc20] of env.erc20s.entries()) {
        describe(`erc20 ${i}/${env.erc20s.length} (${erc20.address})`, () => {
          it('should have the same storage', async () => {
            const account = env.surgeryDataSources.dump.find(
              (a) => a.address === erc20.address
            )
            if (account.storage) {
              for (const key of Object.keys(account.storage)) {
                const pre = await env.preL2Provider.getStorageAt(
                  account.address,
                  BigNumber.from(key)
                )
                const post = await env.postL2Provider.getStorageAt(
                  account.address,
                  BigNumber.from(key)
                )
                expect(pre).to.deep.eq(post)
              }
            }
          })
        })
      }
    })
  })
})
-/* eslint-disable @typescript-eslint/no-empty-function */
import { ethers, BigNumber, Contract } from 'ethers'

import { expect, env, ERC20_ABI } from './setup'
import { AccountType } from '../scripts/types'
import { GenesisJsonProvider } from './provider'

-describe.skip('predeploys', () => {
describe('predeploys', () => {
  const predeploys = {
    eth: [],
    newNotEth: [],
    noWipe: [],
    wipe: [],
    weth: [],
  }

  // Base genesis file only
  let genesisStateProvider: GenesisJsonProvider
  // Old sequencer state
  let oldStateProvider: GenesisJsonProvider

  before(async () => {
    await env.init()
    predeploys.eth = env.getAccountsByType(AccountType.PREDEPLOY_ETH)
    predeploys.newNotEth = env.getAccountsByType(
      AccountType.PREDEPLOY_NEW_NOT_ETH
    )
    predeploys.noWipe = env.getAccountsByType(AccountType.PREDEPLOY_NO_WIPE)
    predeploys.wipe = env.getAccountsByType(AccountType.PREDEPLOY_WIPE)
    predeploys.weth = env.getAccountsByType(AccountType.PREDEPLOY_WETH)

    genesisStateProvider = new GenesisJsonProvider(
      env.surgeryDataSources.genesis
    )
    oldStateProvider = new GenesisJsonProvider(
      env.surgeryDataSources.configs.stateDumpFilePath
    )
  })

  describe('new predeploys that are not ETH', () => {
-   it('should have the exact state specified in the base genesis file', async () => {})
    for (const [i, account] of predeploys.newNotEth.entries()) {
      describe(`account ${i}/${predeploys.newNotEth.length} (${account.address})`, () => {
        it('should have the exact state specified in the base genesis file', async () => {
          const preBytecode = await genesisStateProvider.getCode(
            account.address
          )
          const postBytecode = await env.postL2Provider.getCode(account.address)
          expect(preBytecode).to.eq(postBytecode)

          const dumpAccount = env.surgeryDataSources.dump.find(
            (a) => a.address === account.address
          )
          if (dumpAccount.storage) {
            for (const key of Object.keys(dumpAccount.storage)) {
              const pre = await env.preL2Provider.getStorageAt(
                account.address,
                BigNumber.from(key)
              )
              const post = await env.postL2Provider.getStorageAt(
                account.address,
                BigNumber.from(key)
              )
              expect(pre).to.deep.eq(post)
            }
          }

          const preNonce = await genesisStateProvider.getTransactionCount(
            account.address,
            env.config.stateDumpHeight
          )
          const postNonce = await env.postL2Provider.getTransactionCount(
            account.address
          )
          expect(preNonce).to.deep.eq(postNonce)

          const preBalance = await genesisStateProvider.getBalance(
            account.address,
            env.config.stateDumpHeight
          )
          const postBalance = await env.postL2Provider.getBalance(
            account.address
          )
          expect(preBalance).to.deep.eq(postBalance)
        })
      })
    }
  })

  describe('predeploys where the old state should be wiped', () => {
-   it('should have the code and storage of the base genesis file', async () => {})
-   it('should have the same nonce and balance as before', async () => {})
    for (const [i, account] of predeploys.wipe.entries()) {
      describe(`account ${i}/${predeploys.wipe.length} (${account.address})`, () => {
        it('should have the code and storage of the base genesis file', async () => {
          const preBytecode = await genesisStateProvider.getCode(
            account.address
          )
          const postBytecode = await env.postL2Provider.getCode(account.address)
          expect(preBytecode).to.eq(postBytecode)

          const dumpAccount = env.surgeryDataSources.dump.find(
            (a) => a.address === account.address
          )
          if (dumpAccount.storage) {
            for (const key of Object.keys(dumpAccount.storage)) {
              const pre = await env.preL2Provider.getStorageAt(
                account.address,
                BigNumber.from(key)
              )
              const post = await env.postL2Provider.getStorageAt(
                account.address,
                BigNumber.from(key)
              )
              expect(pre).to.deep.eq(post)
            }
          }
        })

        it('should have the same nonce and balance as before', async () => {
          const preNonce = await oldStateProvider.getTransactionCount(
            account.address,
            env.config.stateDumpHeight
          )
          const postNonce = await env.postL2Provider.getTransactionCount(
            account.address
          )
          expect(preNonce).to.deep.eq(postNonce)

          const preBalance = await oldStateProvider.getBalance(
            account.address,
            env.config.stateDumpHeight
          )
          const postBalance = await env.postL2Provider.getBalance(
            account.address
          )
          expect(preBalance).to.deep.eq(postBalance)
        })
      })
    }
  })

  describe('predeploys where the old state should be preserved', () => {
-   it('should have the code of the base genesis file', async () => {})
-   it('should have the combined storage of the old and new state', async () => {})
-   it('should have the same nonce and balance as before', async () => {})
    for (const [i, account] of predeploys.noWipe.entries()) {
      describe(`account ${i}/${predeploys.noWipe.length} (${account.address})`, () => {
        it('should have the code of the base genesis file', async () => {
          const preBytecode = await genesisStateProvider.getCode(
            account.address
          )
          const postBytecode = await env.postL2Provider.getCode(account.address)
          expect(preBytecode).to.eq(postBytecode)
        })

        it('should have the combined storage of the old and new state', async () => {
          const dumpAccount = env.surgeryDataSources.dump.find(
            (a) => a.address === account.address
          )
          if (dumpAccount.storage) {
            for (const key of Object.keys(dumpAccount.storage)) {
              const pre = await env.preL2Provider.getStorageAt(
                account.address,
                BigNumber.from(key)
              )
              const post = await env.postL2Provider.getStorageAt(
                account.address,
                BigNumber.from(key)
              )
              expect(pre).to.deep.eq(post)
            }
          }
        })

        it('should have the same nonce and balance as before', async () => {
          const preNonce = await oldStateProvider.getTransactionCount(
            account.address,
            env.config.stateDumpHeight
          )
          const postNonce = await env.postL2Provider.getTransactionCount(
            account.address
          )
          expect(preNonce).to.deep.eq(postNonce)

          const preBalance = await oldStateProvider.getBalance(
            account.address,
            env.config.stateDumpHeight
          )
          const postBalance = await env.postL2Provider.getBalance(
            account.address
          )
          expect(preBalance).to.deep.eq(postBalance)
        })
      })
    }
  })

  describe('OVM_ETH', () => {
-   it('should have disabled ERC20 features', async () => {})
-   it('should no recorded balance for the contracts that move to WETH9', async () => {})
-   it('should have a new balance for WETH9 equal to the sum of the moved contract balances', async () => {})
    if (!env.hasLiveProviders()) {
      console.log('Cannot run pool contract tests without live provider')
      return
    }

    let OVM_ETH: Contract
    before(async () => {
      OVM_ETH = new ethers.Contract(
        predeploys.eth[0].address,
        ERC20_ABI,
        env.postL2Provider
      )
    })

    for (const [i, account] of predeploys.eth.entries()) {
      describe(`account ${i}/${predeploys.eth.length} (${account.address})`, () => {
        it('should have disabled ERC20 features', async () => {
          await expect(
            OVM_ETH.transfer(account.address, 100)
          ).to.be.revertedWith(
            'OVM_ETH: transfer is disabled pending further community discussion.'
          )
        })

        it('should have a new balance for WETH9 equal to the sum of the moved contract balances', async () => {
          // need live provider for WETH balances
        })
      })
    }
  })

  describe('WETH9', () => {
-   it('should have balances for each contract that should move', async () => {})
-   it('should have a balance equal to the sum of all moved balances', async () => {})
    for (const [i, account] of predeploys.weth.entries()) {
      describe(`account ${i}/${predeploys.weth.length} (${account.address})`, () => {
        it('should no recorded ETH balance', async () => {
          const postBalance = await env.postL2Provider.getBalance(
            account.address
          )
          expect(postBalance.toNumber()).to.eq(0)
        })

        it('should have WETH balances for each contract that should move', async () => {
          if (!env.hasLiveProviders()) {
            console.log('Cannot run pool contract tests without live provider')
            return
          }
        })

        it('should have a balance equal to the sum of all moved balances', async () => {
          if (!env.hasLiveProviders()) {
            console.log('Cannot run pool contract tests without live provider')
            return
          }
        })
      })
    }
  })
})
@@ -49,7 +49,7 @@ const genesis: Genesis = {
  },
}

-describe.only('GenesisJsonProvider', () => {
describe('GenesisJsonProvider', () => {
  let provider
  before(() => {
    provider = new GenesisJsonProvider(genesis)
...
@@ -5,13 +5,15 @@ import chaiAsPromised from 'chai-as-promised'
import * as dotenv from 'dotenv'
import { getenv, remove0x } from '@eth-optimism/core-utils'
import { providers, BigNumber } from 'ethers'
import { solidity } from 'ethereum-waffle'

import { SurgeryDataSources, Account, AccountType } from '../scripts/types'
import { loadSurgeryData } from '../scripts/data'
-import { classify } from '../scripts/classifiers'
import { classify, classifiers } from '../scripts/classifiers'
import { GenesisJsonProvider } from './provider'

// Chai plugins go here.
chai.use(chaiAsPromised)
chai.use(solidity)

const should = chai.should()
const expect = chai.expect
@@ -19,6 +21,9 @@ const expect = chai.expect
dotenv.config()

export const NUM_ACCOUNTS_DIVISOR = 4096
export const ERC20_ABI = [
  'function balanceOf(address owner) view returns (uint256)',
]

interface TestEnvConfig {
  preL2ProviderUrl: string | null
@@ -64,6 +69,9 @@ class TestEnv {
  // List of typed accounts in the input dump
  accounts: TypedAccount[] = []

  // List of erc20 contracts in input dump
  erc20s: Account[] = []

  constructor(opts: TestEnvConfig) {
    this.config = opts

    // If the pre provider url is provided, use a json rpc provider.
@@ -138,6 +146,10 @@ class TestEnv {
        ...account,
        type: accountType,
      })

      if (classifiers[AccountType.ERC20](account, this.surgeryDataSources)) {
        this.erc20s.push(account)
      }
    }
  }
}
...
@@ -2,11 +2,9 @@ import { ethers } from 'ethers'
import { abi as UNISWAP_POOL_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Pool.sol/UniswapV3Pool.json'
import { UNISWAP_V3_NFPM_ADDRESS } from '../scripts/constants'
import { getUniswapV3Factory, replaceWETH } from '../scripts/utils'
-import { expect, env } from './setup'
import { expect, env, ERC20_ABI } from './setup'
import { AccountType } from '../scripts/types'

-const ERC20_ABI = ['function balanceOf(address owner) view returns (uint256)']

describe('uniswap contracts', () => {
  before(async () => {
    await env.init()
...
import { expect } from '@eth-optimism/core-utils/test/setup'
import fs from 'fs/promises'
import path from 'path'

import { isBytecodeERC20 } from '../scripts/utils'

describe('Utils', () => {
  // Read in the mock data
  const contracts = {}
  before(async () => {
    const files = await fs.readdir(path.join(__dirname, 'data'))
    for (const filename of files) {
      const file = await fs.readFile(path.join(__dirname, 'data', filename))
      const name = path.parse(filename).name
      const json = JSON.parse(file.toString())
      contracts[name] = {
        bytecode: json.bytecode.toString().trim(),
        expected: json.expected,
      }
    }
  })

  it('isBytecodeERC20', () => {
    for (const [name, contract] of Object.entries(contracts)) {
      describe(`contract ${name}`, () => {
        it('should be identified erc20', () => {
          const result = isBytecodeERC20((contract as any).bytecode as string)
          expect(result).to.eq((contract as any).expected)
        })
      })
    }
  })
})
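The test above only reads two fields from each JSON file under test/data: the runtime bytecode and the expected classification. A hypothetical fixture generator, shown purely to document that shape; the provider URL, address, and file name are placeholders and this script is not part of the PR.

import fs from 'fs/promises'
import path from 'path'
import { ethers } from 'ethers'

// Fetch runtime bytecode for an address and store it next to the expected
// isBytecodeERC20 result, using the field names the test reads.
const writeFixture = async (
  name: string,
  address: string,
  expected: boolean
) => {
  const provider = new ethers.providers.JsonRpcProvider('http://localhost:8545')
  const bytecode = await provider.getCode(address)
  const out = path.join(__dirname, 'data', `${name}.json`)
  await fs.writeFile(out, JSON.stringify({ bytecode, expected }, null, 2))
}

// Example: await writeFixture('weth', '0x4200000000000000000000000000000000000006', true)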
@@ -724,6 +724,21 @@
    "@ethersproject/properties" "^5.4.0"
    "@ethersproject/strings" "^5.4.0"

"@ethersproject/abi@^5.5.0":
  version "5.5.0"
  resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.5.0.tgz#fb52820e22e50b854ff15ce1647cc508d6660613"
  integrity sha512-loW7I4AohP5KycATvc0MgujU6JyCHPqHdeoo9z3Nr9xEiNioxa65ccdm1+fsoJhkuhdRtfcL8cfyGamz2AxZ5w==
  dependencies:
    "@ethersproject/address" "^5.5.0"
    "@ethersproject/bignumber" "^5.5.0"
    "@ethersproject/bytes" "^5.5.0"
    "@ethersproject/constants" "^5.5.0"
    "@ethersproject/hash" "^5.5.0"
    "@ethersproject/keccak256" "^5.5.0"
    "@ethersproject/logger" "^5.5.0"
    "@ethersproject/properties" "^5.5.0"
    "@ethersproject/strings" "^5.5.0"

"@ethersproject/abstract-provider@5.4.1", "@ethersproject/abstract-provider@^5.0.0", "@ethersproject/abstract-provider@^5.4.0", "@ethersproject/abstract-provider@^5.4.1":
  version "5.4.1"
  resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.4.1.tgz#e404309a29f771bd4d28dbafadcaa184668c2a6e"
@@ -6861,7 +6876,7 @@ ethereum-cryptography@^0.1.2, ethereum-cryptography@^0.1.3:
    secp256k1 "^4.0.1"
    setimmediate "^1.0.5"

-ethereum-waffle@^3.3.0:
ethereum-waffle@^3.3.0, ethereum-waffle@^3.4.0:
  version "3.4.0"
  resolved "https://registry.yarnpkg.com/ethereum-waffle/-/ethereum-waffle-3.4.0.tgz#990b3c6c26db9c2dd943bf26750a496f60c04720"
  integrity sha512-ADBqZCkoSA5Isk486ntKJVjFEawIiC+3HxNqpJqONvh3YXBTNiRfXvJtGuAFLXPG91QaqkGqILEHANAo7j/olQ==
...