Commit 97c73cca authored by smartcontracts's avatar smartcontracts Committed by Kelvin Fichter

feat: add surgery script base (#1507)

* feat: add surgery script base

* test: add test setup for surgery handlers

* feat: add handlers for uniswap contracts

* asdf

* feat: clean up main surgery script

* surgery: add constants and solc download script

* feat: download pool data for surgery script

* feat: add handlers for remaining predeploys

* feat: ingest configs

* fix: bugs in the handling of weth and eth

* fix: add handling for uni pools

* feat: handle 1inch surgery steps

* feat: correctly load and generate the genesis file

* regenesis-surgery: implement recompilation

Add recompilation that links libraries and includes
immutables

* workflows: update ci to use node v14

* surgery: bugfixes and logging

* surgery: update

* surgery: more fixes

* fix: surgery script cleanup

* surgery: dockerize

* more cleanup

* surgery: fix tests

* surgery: update .env.example

* fix: ingest based on state dump output

* fix merge conflicts

* minor cleanup to getSolc function

* feat: standardize dump output

* fix: tests

* minor bug fixes

* final cleanup and optimizations

* clean up compilation step

* highly optimize surgery process

* fix: remove unused deps

* remove unit tests for now

* final cleanup and tweaks

* fix final bugs

* more minor bugfixes

* remove unnecessary giant json file

* fix: handle multiple libraries

* wip: start testing shell

* wip: continue filling out shell

* wip: minor tweaks to uniswap spec

* wip: refactor data loading so it can be used in tests

* surgery: initialize test infra

* core-utils: add helpful functions

* state-surgery: more test infra

* core-utils: env var parsing

* add Hop ETH addresses

* linting and test cleanup

* finalize eoa tests

* clean up test process for eoas

* add uniswap tests

* feat: surgery prevent double withdraw

* fix: pass thru no wipe before returning

* fix: undo temp code removal

* Update handlers.ts

* fix: uniswap immutables bug

* fix: make sure multicall is in the dump

* fix: revert multicall address logic

* state-surgery: basic tests for verified contracts

* fix: add data and solc bin dirs to eslint ignore

* lint: fix

* various minor fixes

* Update packages/regenesis-surgery/scripts/constants.ts
Co-authored-by: default avatarben-chain <ben@pseudonym.party>

* disable tests for CI

* fix: remove message passer handling

* fix: remove unused dependencies
Co-authored-by: default avatarAnnie Ke <annieke8@gmail.com>
Co-authored-by: default avatarMark Tyneway <mark.tyneway@gmail.com>
Co-authored-by: default avatarben-chain <ben@pseudonym.party>
parent 5d7e9b4e
......@@ -24,7 +24,7 @@ jobs:
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '12.x'
node-version: '14.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
......@@ -73,7 +73,7 @@ jobs:
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '12.x'
node-version: '14.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
......
......@@ -37,4 +37,8 @@ ps-metrics:
-f docker-compose.yml \
-f docker-compose-metrics.yml \
ps
.PHONY: ps
\ No newline at end of file
.PHONY: ps
regenesis-surgery:
docker build -f ./docker/Dockerfile.regenesis-surgery \
-t ethereumoptimism/regenesis-surgery:latest ..
......@@ -31,6 +31,7 @@ COPY packages/data-transport-layer/package.json ./packages/data-transport-layer/
COPY packages/batch-submitter/package.json ./packages/batch-submitter/package.json
COPY packages/message-relayer/package.json ./packages/message-relayer/package.json
COPY packages/replica-healthcheck/package.json ./packages/replica-healthcheck/package.json
COPY packages/regenesis-surgery/package.json ./packages/regenesis-surgery/package.json
COPY integration-tests/package.json ./integration-tests/package.json
RUN yarn install --frozen-lockfile
......
FROM ethereumoptimism/builder AS builder
FROM node:14-alpine
RUN apk add --no-cache curl bash jq
WORKDIR /opt/optimism
# copy top level files
COPY --from=builder /optimism/*.json /optimism/yarn.lock ./
COPY --from=builder /optimism/node_modules ./node_modules
# copy deps (would have been nice if docker followed the symlinks required)
COPY --from=builder /optimism/packages/core-utils/package.json ./packages/core-utils/package.json
COPY --from=builder /optimism/packages/core-utils/dist ./packages/core-utils/dist
COPY --from=builder /optimism/packages/common-ts/package.json ./packages/common-ts/package.json
COPY --from=builder /optimism/packages/common-ts/dist ./packages/common-ts/dist
COPY --from=builder /optimism/packages/contracts/package.json ./packages/contracts/package.json
COPY --from=builder /optimism/packages/contracts/deployments ./packages/contracts/deployments
COPY --from=builder /optimism/packages/contracts/dist ./packages/contracts/dist
COPY --from=builder /optimism/packages/contracts/artifacts ./packages/contracts/artifacts
# copy the service
WORKDIR /opt/optimism/packages/regenesis-surgery
COPY --from=builder /optimism/packages/regenesis-surgery/package.json ./
COPY --from=builder /optimism/packages/regenesis-surgery/scripts ./scripts
COPY --from=builder /optimism/packages/regenesis-surgery/node_modules ./node_modules
......@@ -10,3 +10,29 @@ export const sleep = async (ms: number): Promise<void> => {
}, ms)
})
}
// Shallow-copies an object's own enumerable properties into a fresh object.
// Note: nested objects are shared with the original, not duplicated.
export const clone = (obj: any): any => {
  if (obj === undefined) {
    throw new Error(`Trying to clone undefined object`)
  }
  const copy = Object.assign({}, obj)
  return copy
}
/**
 * Loads a variable from the environment and throws if the variable is not defined.
 *
 * @param name Name of the variable to load.
 * @returns Value of the variable as a string.
 */
export const reqenv = (name: string): string => {
  const value = process.env[name]
  if (value !== undefined) {
    return value
  }
  throw new Error(`missing env var ${name}`)
}
/**
 * Loads a variable from the environment, falling back to a default when the
 * variable is not set.
 *
 * Uses `??` rather than `||` so that an env var explicitly set to the empty
 * string is returned as-is instead of being clobbered by the fallback.
 *
 * @param name Name of the variable to load.
 * @param fallback Optional value returned when the variable is unset.
 * @returns Value of the variable, the fallback, or undefined.
 */
export const getenv = (name: string, fallback?: string): string | undefined => {
  return process.env[name] ?? fallback
}
FROM_BLOCK=
TO_BLOCK=
SEQUENCER_URL=
ETH_ADDR=
UNI_FACTORY_ADDR=
BLOCK_INTERVAL=
EVENTS_OUTPUT_PATH=
REGEN__STATE_DUMP_FILE=
REGEN__ETHERSCAN_FILE=
REGEN__GENESIS_FILE=
REGEN__OUTPUT_FILE=
REGEN__L2_PROVIDER_URL=
REGEN__L2_NETWORK_NAME=
REGEN__L1_PROVIDER_URL=
REGEN__L1_TESTNET_PROVIDER_URL=
REGEN__L1_TESTNET_PRIVATE_KEY=
START_INDEX=
END_INDEX=
module.exports = {
extends: '../../.eslintrc.js',
ignorePatterns: ['/data', '/solc-bin'],
}
node_modules/
build/
all-events.json
solc-bin/
outputs/
etherscan/
state-dumps/
data/
......@@ -9,19 +9,27 @@
"lint": "yarn run lint:fix && yarn run lint:check",
"lint:fix": "yarn lint:check --fix",
"lint:check": "eslint .",
"pre-commit": "lint-staged"
"pre-commit": "lint-staged",
"test:surgery": "hardhat --config test/config/hardhat.config.ts test",
"start": "ts-node ./scripts/surgery.ts"
},
"devDependencies": {
"@types/node": "^15.12.2",
"ts-mocha": "^8.0.0",
"ts-node": "^10.0.0",
"typescript": "^4.3.2"
},
"dependencies": {
"@discoveryjs/json-ext": "^0.5.3",
"@eth-optimism/contracts": "^0.4.10",
"@uniswap/v3-core-optimism": "^1.0.0-rc.0",
"@eth-optimism/core-utils": "^0.6.0",
"@types/node": "^15.12.2",
"@types/node-fetch": "^3.0.3",
"@uniswap/sdk-core": "^3.0.1",
"@uniswap/v3-core": "^1.0.0",
"@uniswap/v3-sdk": "^3.5.1",
"byline": "^5.0.0",
"chai-as-promised": "^7.1.1",
"dotenv": "^10.0.0",
"ethers": "^5.4.5"
"ethereumjs-util": "^7.1.2",
"ethers": "^5.4.5",
"hardhat": "^2.6.5",
"mocha": "^9.1.2",
"node-fetch": "2.6.5",
"solc": "0.8.7-fixed",
"ts-node": "^10.0.0"
}
}
import {
EOA_CODE_HASHES,
UNISWAP_V3_FACTORY_ADDRESS,
UNISWAP_V3_NFPM_ADDRESS,
UNISWAP_V3_CONTRACT_ADDRESSES,
PREDEPLOY_WIPE_ADDRESSES,
PREDEPLOY_NO_WIPE_ADDRESSES,
PREDEPLOY_NEW_NOT_ETH_ADDRESSES,
OLD_ETH_ADDRESS,
NEW_ETH_ADDRESS,
ONEINCH_DEPLOYER_ADDRESS,
DELETE_CONTRACTS,
} from './constants'
import { Account, AccountType, SurgeryDataSources } from './types'
import { hexStringEqual } from './utils'
// Maps each AccountType to a predicate that returns true when the given
// account belongs to that type. Consumed by `classify` (below), which tests
// the types in ascending enum-value order and returns the first match, so an
// account matching several predicates is assigned the lowest-valued type.
export const classifiers: {
  [key in AccountType]: (account: Account, data: SurgeryDataSources) => boolean
} = {
  // The 1inch deployer is matched by its single known address.
  [AccountType.ONEINCH_DEPLOYER]: (account) => {
    return hexStringEqual(account.address, ONEINCH_DEPLOYER_ADDRESS)
  },
  // Contracts slated for deletion are a fixed address list (see constants).
  [AccountType.DELETE]: (account) => {
    return DELETE_CONTRACTS.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  // EOAs are detected by matching the known OVM_ECDSAContractAccount code
  // hashes (one per OVM release — see EOA_CODE_HASHES).
  [AccountType.EOA]: (account) => {
    // Just in case the account doesn't have a code hash
    if (!account.codeHash) {
      return false
    }
    return EOA_CODE_HASHES.some((codeHash) => {
      return hexStringEqual(account.codeHash, codeHash)
    })
  },
  // Precompiles live in the reserved low address space: any address whose
  // first 19 bytes are zero (0x0000...00XX).
  [AccountType.PRECOMPILE]: (account) => {
    return account.address
      .toLowerCase()
      .startsWith('0x00000000000000000000000000000000000000')
  },
  [AccountType.PREDEPLOY_NEW_NOT_ETH]: (account) => {
    return PREDEPLOY_NEW_NOT_ETH_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  [AccountType.PREDEPLOY_WIPE]: (account) => {
    return PREDEPLOY_WIPE_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  [AccountType.PREDEPLOY_NO_WIPE]: (account) => {
    return PREDEPLOY_NO_WIPE_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  [AccountType.PREDEPLOY_ETH]: (account) => {
    return hexStringEqual(account.address, NEW_ETH_ADDRESS)
  },
  [AccountType.PREDEPLOY_WETH]: (account) => {
    return hexStringEqual(account.address, OLD_ETH_ADDRESS)
  },
  [AccountType.UNISWAP_V3_FACTORY]: (account) => {
    return hexStringEqual(account.address, UNISWAP_V3_FACTORY_ADDRESS)
  },
  [AccountType.UNISWAP_V3_NFPM]: (account) => {
    return hexStringEqual(account.address, UNISWAP_V3_NFPM_ADDRESS)
  },
  // Pools are matched against the pool data built from the factory's
  // PoolCreated events (see the data-loading module).
  [AccountType.UNISWAP_V3_POOL]: (account, data) => {
    return data.pools.some((pool) => {
      return hexStringEqual(pool.oldAddress, account.address)
    })
  },
  [AccountType.UNISWAP_V3_OTHER]: (account) => {
    return UNISWAP_V3_CONTRACT_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  // A contract is unverified when etherscan has no entry for it or the entry
  // has empty source code. NOTE(review): this uses exact (case-sensitive)
  // address comparison, unlike the hexStringEqual checks above — confirm the
  // etherscan dump's address casing matches the state dump's.
  [AccountType.UNVERIFIED]: (account, data) => {
    const found = data.etherscanDump.find(
      (c) => c.contractAddress === account.address
    )
    return found === undefined || found.sourceCode === ''
  },
  // Verified is simply the complement of unverified.
  [AccountType.VERIFIED]: (account, data) => {
    return !classifiers[AccountType.UNVERIFIED](account, data)
  },
}
/**
 * Classifies an account into exactly one AccountType by testing each
 * classifier in ascending enum-value order and returning the first match.
 *
 * @param account Account to classify.
 * @param data Supplementary data sources used by some classifiers.
 * @returns The first matching AccountType.
 * @throws If no classifier matches. Previously this fell through and
 * implicitly returned undefined, which produced confusing downstream
 * failures when the result was used to index into the handler table.
 */
export const classify = (
  account: Account,
  data: SurgeryDataSources
): AccountType => {
  // A numeric TS enum object contains both name->value and value->name
  // entries; the isNaN filter keeps only the numeric values.
  for (const accountType in AccountType) {
    if (!isNaN(Number(accountType))) {
      if (classifiers[accountType](account, data)) {
        return Number(accountType)
      }
    }
  }
  throw new Error(`unable to classify account: ${account.address}`)
}
import path from 'path'
// Codehashes of OVM_ECDSAContractAccount for 0.3.0 and 0.4.0
export const EOA_CODE_HASHES = [
'0xa73df79c90ba2496f3440188807022bed5c7e2e826b596d22bcb4e127378835a',
'0xef2ab076db773ffc554c9f287134123439a5228e92f5b3194a28fec0a0afafe3',
]
export const UNISWAP_V3_FACTORY_ADDRESS =
'0x1F98431c8aD98523631AE4a59f267346ea31F984'
export const UNISWAP_V3_NFPM_ADDRESS =
'0xC36442b4a4522E871399CD717aBDD847Ab11FE88'
export const UNISWAP_V3_CONTRACT_ADDRESSES = [
// PoolDeployer
'0x569E8D536EC2dD5988857147c9FCC7d8a08a7DBc',
// UniswapV3Factory
'0x1F98431c8aD98523631AE4a59f267346ea31F984',
// ProxyAdmin
'0xB753548F6E010e7e680BA186F9Ca1BdAB2E90cf2',
// TickLens
'0xbfd8137f7d1516D3ea5cA83523914859ec47F573',
// Quoter
'0xb27308f9F90D607463bb33eA1BeBb41C27CE5AB6',
// SwapRouter
'0xE592427A0AEce92De3Edee1F18E0157C05861564',
// NonfungiblePositionLibrary
'0x42B24A95702b9986e82d421cC3568932790A48Ec',
// NonfungibleTokenPositionDescriptor
'0x91ae842A5Ffd8d12023116943e72A606179294f3',
// TransparentUpgradeableProxy
'0xEe6A57eC80ea46401049E92587E52f5Ec1c24785',
// NonfungibleTokenPositionManager
'0xC36442b4a4522E871399CD717aBDD847Ab11FE88',
// UniswapInterfaceMulticall (OP KOVAN)
'0x1F98415757620B543A52E61c46B32eB19261F984',
// UniswapInterfaceMulticall (OP MAINNET)
'0x90f872b3d8f33f305e0250db6A2761B354f7710A',
]
export const PREDEPLOY_WIPE_ADDRESSES = [
// L2CrossDomainMessenger
'0x4200000000000000000000000000000000000007',
// OVM_GasPriceOracle
'0x420000000000000000000000000000000000000F',
// L2StandardBridge
'0x4200000000000000000000000000000000000010',
// OVM_SequencerFeeVault
'0x4200000000000000000000000000000000000011',
]
export const PREDEPLOY_NO_WIPE_ADDRESSES = [
// OVM_DeployerWhitelist
'0x4200000000000000000000000000000000000002',
// OVM_L2ToL1MessagePasser
'0x4200000000000000000000000000000000000000',
]
export const PREDEPLOY_NEW_NOT_ETH_ADDRESSES = [
// L2StandardTokenFactory
'0x4200000000000000000000000000000000000012',
// OVM_L1BlockNumber
'0x4200000000000000000000000000000000000013',
]
export const OLD_ETH_ADDRESS = '0x4200000000000000000000000000000000000006'
export const NEW_ETH_ADDRESS = '0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000'
export const ONEINCH_DEPLOYER_ADDRESS =
'0xee4f7b6c39e7e87af01fb9e4cee0c893ff4d63f2'
export const DELETE_CONTRACTS = [
// 1inch aggregator
'0x11111112542D85B3EF69AE05771c2dCCff4fAa26',
// OVM_L1MessageSender
'0x4200000000000000000000000000000000000001',
// OVM v1 System Contract
'0xDEADDEaDDeAddEADDeaDDEADdeaDdeAddeAd0005',
// OVM v1 System Contract
'0xDEADdeAdDeAddEAdDEaDdEaddEAddeaDdEaD0006',
// OVM v1 System Contract
'0xDeaDDeaDDeaddEADdeaDdEadDeaDdeADDEad0007',
// Uniswap Position
'0x18F7E3ae7202e93984290e1195810c66e1E276FF',
// Uniswap Oracle
'0x17b0f5e5850e7230136df66c5d49497b8c3be0c1',
// Uniswap Tick
'0x47405b0d5f88e16701be6dc8ae185fefaa5dca2f',
// Uniswap TickBitmap
'0x01d95165c3c730d6b40f55c37e24c7aac73d5e6f',
// Uniswap TickMath
'0x308c3e60585ad4eab5b7677be0566fead4cb4746',
// Uniswap SwapMath
'0x198dcc7cd919dd33dd72c3f981df653750901d75',
// Uniswap UniswapV3PoolDeployer
'0x569e8d536ec2dd5988857147c9fcc7d8a08a7dbc',
// Uniswap NFTDescriptor
'0x042f51014b152c2d2fc9b57e36b16bc744065d8c',
]
export const WETH_TRANSFER_ADDRESSES = [
// Rubicon Mainnet bathETH
'0xB0bE5d911E3BD4Ee2A8706cF1fAc8d767A550497',
// Rubicon Mainnet bathETH-USDC
'0x87a7Eed69eaFA78D30344001D0baFF99FC005Dc8',
// Rubicon Mainnet bathETH-DAI
'0x314eC4Beaa694264746e1ae324A5edB913a6F7C6',
// Rubicon Mainnet bathETH-USDT
'0xF6A47B24e80D12Ac7d3b5Cef67B912BCd3377333',
// Rubicon Mainnet exchange
'0x7a512d3609211e719737E82c7bb7271eC05Da70d',
// Rubicon Kovan bathETH
'0x5790AedddfB25663f7dd58261De8E96274A82BAd',
// Rubicon Kovan bathETH-USDC
'0x52fBa53c876a47a64A10F111fbeA7Ed506dCc7e7',
// Rubicon Kovan bathETH-DAI
'0xA92E4Bd9f61e90757Cd8806D236580698Fc20C91',
// Rubicon Kovan bathETH-USDT
'0x80D94a6f6b0335Bfed8D04B92423B6Cd14b5d31C',
// Rubicon Kovan market
'0x5ddDa7DF721272106af1904abcc64E76AB2019d2',
// Hop Kovan AMM Wrapper
'0xc9E6628791cdD4ad568550fcc6f378cEF27e98fd',
// Hop Kovan Swap
'0xD6E31cE884DFf44c4600fD9D36BcC9af447C28d5',
]
// TODO: confirm OVM/EVM mappings with ben-chain
export const COMPILER_VERSIONS_TO_SOLC = {
'v0.5.16': 'v0.5.16+commit.9c3226ce',
'v0.5.16-alpha.7': 'v0.5.16+commit.9c3226ce',
'v0.6.12': 'v0.6.12+commit.27d51765',
'v0.7.6': 'v0.7.6+commit.7338295f',
'v0.7.6+commit.3b061308': 'v0.7.6+commit.7338295f',
'v0.7.6-allow_kall': 'v0.7.6+commit.7338295f',
'v0.7.6-no_errors': 'v0.7.6+commit.7338295f',
'v0.8.4': 'v0.8.4+commit.c7e474f2',
}
export const SOLC_BIN_PATH = 'https://binaries.soliditylang.org'
export const EMSCRIPTEN_BUILD_PATH = `${SOLC_BIN_PATH}/emscripten-wasm32`
export const EMSCRIPTEN_BUILD_LIST = `${EMSCRIPTEN_BUILD_PATH}/list.json`
export const LOCAL_SOLC_DIR = path.join(__dirname, '..', 'solc-bin')
import { ethers } from 'ethers'
import {
computePoolAddress,
POOL_INIT_CODE_HASH,
POOL_INIT_CODE_HASH_OPTIMISM,
POOL_INIT_CODE_HASH_OPTIMISM_KOVAN,
} from '@uniswap/v3-sdk'
import { Token } from '@uniswap/sdk-core'
import { UNISWAP_V3_FACTORY_ADDRESS } from './constants'
import { downloadAllSolcVersions } from './solc'
import {
PoolHashCache,
StateDump,
UniswapPoolData,
SurgeryDataSources,
EtherscanContract,
SurgeryConfigs,
GenesisFile,
} from './types'
import {
loadConfigs,
checkStateDump,
readDumpFile,
readEtherscanFile,
readGenesisFile,
getUniswapV3Factory,
getMappingKey,
} from './utils'
/**
 * Fetches every Uniswap V3 pool created on L2 (via the factory's PoolCreated
 * events) and computes each pool's old (OVM) and new (EVM) CREATE2 address.
 *
 * @param l2Provider Provider connected to the L2 chain to query events from.
 * @param network Which network's OVM init code hash to use for the old address.
 * @returns One entry per PoolCreated event with old/new addresses, the token
 * pair, and the fee tier.
 */
export const getUniswapPoolData = async (
  l2Provider: ethers.providers.BaseProvider,
  network: 'mainnet' | 'kovan'
): Promise<UniswapPoolData[]> => {
  const UniswapV3Factory = getUniswapV3Factory(l2Provider)
  const pools: UniswapPoolData[] = []
  const poolEvents = await UniswapV3Factory.queryFilter('PoolCreated' as any)
  for (const event of poolEvents) {
    // Compute the old pool address using the OVM init code hash.
    // NOTE(review): token decimals are hard-coded to 18 — presumably fine
    // because only the token addresses and fee feed into the CREATE2
    // address, but confirm against the v3-sdk computePoolAddress docs.
    const oldPoolAddress = computePoolAddress({
      factoryAddress: UNISWAP_V3_FACTORY_ADDRESS,
      tokenA: new Token(0, event.args.token0, 18),
      tokenB: new Token(0, event.args.token1, 18),
      fee: event.args.fee,
      initCodeHashManualOverride:
        network === 'mainnet'
          ? POOL_INIT_CODE_HASH_OPTIMISM
          : POOL_INIT_CODE_HASH_OPTIMISM_KOVAN,
    }).toLowerCase()
    // Compute the new pool address using the EVM init code hash.
    const newPoolAddress = computePoolAddress({
      factoryAddress: UNISWAP_V3_FACTORY_ADDRESS,
      tokenA: new Token(0, event.args.token0, 18),
      tokenB: new Token(0, event.args.token1, 18),
      fee: event.args.fee,
      initCodeHashManualOverride: POOL_INIT_CODE_HASH,
    }).toLowerCase()
    pools.push({
      oldAddress: oldPoolAddress,
      newAddress: newPoolAddress,
      token0: event.args.token0,
      token1: event.args.token1,
      fee: event.args.fee,
    })
  }
  return pools
}
// Precomputes the mapping keys derived from each pool's old address for
// indices 0-999, keyed by the computed hash, so later lookups during surgery
// are a single dictionary access.
export const makePoolHashCache = (pools: UniswapPoolData[]): PoolHashCache => {
  const cache: PoolHashCache = {}
  for (const pool of pools) {
    for (let index = 0; index < 1000; index++) {
      const slotKey = getMappingKey([pool.oldAddress], index)
      cache[slotKey] = { pool, index }
    }
  }
  return cache
}
/**
 * Loads every data source needed by the surgery process: solc binaries,
 * configuration values, the state dump, the genesis file, the etherscan
 * dump, Uniswap pool data, and providers/wallets for each relevant network.
 *
 * @param configs Optional pre-loaded configuration; when omitted, values are
 * read from the environment via loadConfigs (which throws on missing vars).
 * @returns All surgery data sources bundled together.
 */
export const loadSurgeryData = async (
  configs?: SurgeryConfigs
): Promise<SurgeryDataSources> => {
  // First download every solc version that we'll need during this surgery.
  console.log('Downloading all required solc versions...')
  await downloadAllSolcVersions()
  // Load the configuration values, will throw if anything is missing.
  if (configs === undefined) {
    console.log('Loading configuration values...')
    configs = loadConfigs()
  }
  // Load and validate the state dump.
  console.log('Loading and validating state dump file...')
  const dump: StateDump = await readDumpFile(configs.stateDumpFilePath)
  checkStateDump(dump)
  console.log(`${dump.length} entries in state dump`)
  // Load the genesis file.
  console.log('Loading genesis file...')
  const genesis: GenesisFile = await readGenesisFile(configs.genesisFilePath)
  // Flatten the genesis alloc ({address -> account}) into a StateDump array.
  const genesisDump: StateDump = []
  for (const [address, account] of Object.entries(genesis.alloc)) {
    genesisDump.push({
      address,
      ...account,
    })
  }
  console.log(`${genesisDump.length} entries in genesis file`)
  // Load the etherscan dump.
  console.log('Loading etherscan dump file...')
  const etherscanDump: EtherscanContract[] = await readEtherscanFile(
    configs.etherscanFilePath
  )
  console.log(`${etherscanDump.length} entries in etherscan dump`)
  // Get a reference to the L2 provider so we can load pool data.
  console.log('Connecting to L2 provider...')
  const l2Provider = new ethers.providers.JsonRpcProvider(configs.l2ProviderUrl)
  // Load the pool data.
  console.log('Loading Uniswap pool data...')
  const pools: UniswapPoolData[] = await getUniswapPoolData(
    l2Provider,
    configs.l2NetworkName
  )
  console.log(`${pools.length} uniswap pools`)
  console.log('Generating pool cache...')
  const poolHashCache = makePoolHashCache(pools)
  // Get a reference to the ropsten provider and wallet, used for deploying Uniswap pools.
  console.log('Connecting to ropsten provider...')
  const ropstenProvider = new ethers.providers.JsonRpcProvider(
    configs.ropstenProviderUrl
  )
  const ropstenWallet = new ethers.Wallet(
    configs.ropstenPrivateKey,
    ropstenProvider
  )
  // Get a reference to the L1 provider.
  console.log('Connecting to L1 provider...')
  const l1Provider = new ethers.providers.JsonRpcProvider(configs.l1ProviderUrl)
  // Get a reference to an ETH (mainnet) provider.
  console.log('Connecting to ETH provider...')
  const ethProvider = new ethers.providers.JsonRpcProvider(
    configs.ethProviderUrl
  )
  // NOTE: the `genesis` field below carries the flattened genesisDump array,
  // not the parsed genesis file object.
  return {
    configs,
    dump,
    genesis: genesisDump,
    pools,
    poolHashCache,
    etherscanDump,
    ropstenProvider,
    ropstenWallet,
    l1Provider,
    l2Provider,
    ethProvider,
  }
}
import { Contract, providers, ethers } from 'ethers'
import { createWriteStream } from 'fs'
import dotenv from 'dotenv'
import { stringifyStream } from '@discoveryjs/json-ext'
import { getContractFactory } from '@eth-optimism/contracts'
import { abi as FACTORY_ABI } from '@uniswap/v3-core-optimism/artifacts-ovm/contracts/UniswapV3Factory.sol/UniswapV3Factory.json'
dotenv.config()
const env = process.env
const SEQUENCER_URL = env.SEQUENCER_URL || 'http://localhost:8545'
const ETH_ADDR = env.ETH_ADDR || '0x4200000000000000000000000000000000000006'
const UNI_FACTORY_ADDR =
env.UNI_FACTORY_ADDR || '0x1F98431c8aD98523631AE4a59f267346ea31F984' // address on mainnet
const FROM_BLOCK = env.FROM_BLOCK || '0'
const TO_BLOCK = env.TO_BLOCK || 'latest'
const BLOCK_INTERVAL = parseInt(env.BLOCK_INTERVAL, 10) || 2000
const EVENTS_OUTPUT_PATH = env.EVENTS_OUTPUT_PATH || './all-events.json'
// Options controlling one paginated event scan in findAllEvents.
interface FindAllEventsOptions {
  provider: providers.StaticJsonRpcProvider
  contract: Contract
  filter: ethers.EventFilter
  fromBlock?: number
  toBlock?: number
  // Maximum number of blocks covered by a single queryFilter call.
  blockInterval?: number
}
// Shape of the JSON document this script writes to EVENTS_OUTPUT_PATH.
interface AllEventsOutput {
  ethTransfers: ethers.Event[]
  uniV3FeeAmountEnabled: ethers.Event[]
  uniV3PoolCreated: ethers.Event[]
  lastBlock: number
}
/**
 * Collects all events matching `filter` by paginating queryFilter over
 * fixed-size block ranges.
 *
 * @param options Provider, contract, filter, and block-range options.
 * @returns The accumulated events (see review notes below about edge cases).
 */
const findAllEvents = async (
  options: FindAllEventsOptions
): Promise<ethers.Event[]> => {
  const { provider, contract, filter, fromBlock, toBlock, blockInterval } =
    options
  const cache = {
    startingBlockNumber: fromBlock || 0,
    events: [],
  }
  let events: ethers.Event[] = []
  let startingBlockNumber = fromBlock || 0
  let endingBlockNumber = toBlock || (await provider.getBlockNumber())
  while (startingBlockNumber < endingBlockNumber) {
    events = events.concat(
      await contract.queryFilter(
        filter,
        startingBlockNumber, // inclusive of both beginning and end
        // https://docs.ethers.io/v5/api/providers/types/#providers-Filter
        Math.min(startingBlockNumber + blockInterval - 1, endingBlockNumber)
      )
    )
    // Events are only committed to cache.events on the final chunk.
    // NOTE(review): if the loop exits via the while-condition instead of this
    // break (e.g. fromBlock >= toBlock up front), cache.events stays empty —
    // confirm that's intended.
    if (startingBlockNumber + blockInterval > endingBlockNumber) {
      cache.startingBlockNumber = endingBlockNumber
      cache.events = cache.events.concat(events)
      break
    }
    startingBlockNumber += blockInterval
    // NOTE(review): this re-reads the chain tip on every iteration and
    // overwrites endingBlockNumber even when an explicit toBlock was passed,
    // so on an advancing chain the scan can run past toBlock — confirm.
    endingBlockNumber = await provider.getBlockNumber()
  }
  return cache.events
}
// Script entry point: scans the sequencer for OVM_ETH Transfer events and
// Uniswap V3 factory FeeAmountEnabled / PoolCreated events between FROM_BLOCK
// and TO_BLOCK, then writes the combined result to EVENTS_OUTPUT_PATH.
;(async () => {
  console.log('Ready to index events')
  const provider = new ethers.providers.StaticJsonRpcProvider(SEQUENCER_URL)
  // A random wallet only satisfies the contract factory's signer requirement;
  // this script just queries events and never sends a transaction.
  const signer = ethers.Wallet.createRandom().connect(provider)
  const ethContract = getContractFactory('OVM_ETH')
    .connect(signer)
    .attach(ETH_ADDR)
  const uniV3FactoryContract = new Contract(
    UNI_FACTORY_ADDR,
    FACTORY_ABI,
    provider
  )
  // Resolve 'latest' to a concrete block number so all three scans share the
  // same upper bound.
  let maxBlock
  if (TO_BLOCK === 'latest') {
    const lastBlock = await provider.getBlock('latest')
    maxBlock = lastBlock.number
  } else {
    maxBlock = parseInt(TO_BLOCK, 10)
  }
  console.log('Max block:', maxBlock)
  const fromBlock = parseInt(FROM_BLOCK, 10)
  // Run the three event scans concurrently.
  const [ethTransfers, uniV3FeeAmountEnabled, uniV3PoolCreated] =
    await Promise.all([
      findAllEvents({
        provider,
        contract: ethContract,
        filter: ethContract.filters.Transfer(),
        fromBlock,
        toBlock: maxBlock,
        blockInterval: BLOCK_INTERVAL,
      }),
      findAllEvents({
        provider,
        contract: uniV3FactoryContract,
        filter: uniV3FactoryContract.filters.FeeAmountEnabled(),
        fromBlock,
        toBlock: maxBlock,
        blockInterval: BLOCK_INTERVAL,
      }),
      findAllEvents({
        provider,
        contract: uniV3FactoryContract,
        filter: uniV3FactoryContract.filters.PoolCreated(),
        fromBlock,
        toBlock: maxBlock,
        blockInterval: BLOCK_INTERVAL,
      }),
    ])
  console.log(`Found ${ethTransfers.length} ETH transfer events`)
  console.log(`Found ${uniV3FeeAmountEnabled.length} FeeAmountEnabled events`)
  console.log(`Found ${uniV3PoolCreated.length} PoolCreated events`)
  const output: AllEventsOutput = {
    lastBlock: maxBlock,
    ethTransfers,
    uniV3FeeAmountEnabled,
    uniV3PoolCreated,
  }
  console.log('Writing output to file', EVENTS_OUTPUT_PATH)
  // Pipe the stringified JSON to the output file.
  const writeStream = createWriteStream(EVENTS_OUTPUT_PATH, 'utf-8')
  stringifyStream(output, null, 2)
    .pipe(writeStream)
    .on('error', (error) => console.error(error))
    .on('finish', () => console.log('Done writing to json file'))
})().catch((err) => {
  console.log(err)
  process.exit(1)
})
This diff is collapsed.
/* eslint @typescript-eslint/no-var-requires: "off" */
import { access, mkdir } from 'fs/promises'
import fetch from 'node-fetch'
import path from 'path'
import fs from 'fs'
import solc from 'solc'
import { ethers } from 'ethers'
import { clone } from '@eth-optimism/core-utils'
import {
COMPILER_VERSIONS_TO_SOLC,
EMSCRIPTEN_BUILD_LIST,
EMSCRIPTEN_BUILD_PATH,
LOCAL_SOLC_DIR,
} from './constants'
import { EtherscanContract } from './types'
// Builds the raw-GitHub URL for a pinned OVM solc emscripten build.
const OVM_BUILD_PATH = (version: string) => {
  const base =
    'https://raw.githubusercontent.com/ethereum-optimism/solc-bin/9455107699d2f7ad9b09e1005c7c07f4b5dd6857/bin'
  return `${base}/soljson-${version}.js`
}
/**
 * Downloads a specific solc version into LOCAL_SOLC_DIR, skipping the
 * download when the file already exists on disk.
 *
 * @param version Solc version to download.
 * @param ovm If true, downloads from the OVM repository (a pinned GitHub
 * commit); otherwise downloads the emscripten build listed at
 * binaries.soliditylang.org.
 */
export const downloadSolc = async (version: string, ovm?: boolean) => {
  // TODO: why is this one missing?
  if (version === 'v0.5.16-alpha.7') {
    return
  }
  // File is the location where we'll put the downloaded compiler.
  let file: string
  // Remote is the URL we'll query if the file doesn't already exist.
  let remote: string
  // Exact file/remote will depend on if downloading OVM or EVM compiler.
  if (ovm) {
    file = `${path.join(LOCAL_SOLC_DIR, version)}.js`
    remote = OVM_BUILD_PATH(version)
  } else {
    const res = await fetch(EMSCRIPTEN_BUILD_LIST)
    const data: any = await res.json()
    const list = data.builds
    // Make sure the target version actually exists
    let target: any
    for (const entry of list) {
      const longVersion = `v${entry.longVersion}`
      if (version === longVersion) {
        target = entry
      }
    }
    // Error out if the given version can't be found
    if (!target) {
      throw new Error(`Cannot find compiler version ${version}`)
    }
    file = path.join(LOCAL_SOLC_DIR, target.path)
    remote = `${EMSCRIPTEN_BUILD_PATH}/${target.path}`
  }
  try {
    // Check to see if we already have the file
    await access(file, fs.constants.F_OK)
  } catch (e) {
    // NOTE(review): progress messages go to stderr via console.error —
    // presumably intentional so stdout stays clean; confirm.
    console.error(`Downloading ${version} ${ovm ? 'ovm' : 'solidity'}`)
    // If we don't have the file, download it
    // NOTE(review): the HTTP status is not checked before writing; a 404
    // body would be saved to disk as the compiler — consider a res.ok check.
    const res = await fetch(remote)
    const bin = await res.text()
    fs.writeFileSync(file, bin)
  }
}
/**
 * Downloads all required solc versions, if not already downloaded.
 */
export const downloadAllSolcVersions = async () => {
  try {
    await mkdir(LOCAL_SOLC_DIR)
  } catch (e) {
    // directory already exists
  }
  // The mapping's keys are OVM compiler versions and its values are the EVM
  // solc releases they correspond to; download both sets, deduplicated.
  const ovmVersions = new Set(Object.keys(COMPILER_VERSIONS_TO_SOLC))
  await Promise.all(
    [...ovmVersions].map((version) => downloadSolc(version, true))
  )
  const evmVersions = new Set(Object.values(COMPILER_VERSIONS_TO_SOLC))
  await Promise.all([...evmVersions].map((version) => downloadSolc(version)))
}
// Extracts (a copy of) the requested contract's entry from the solc output.
// Falls back to the synthetic 'file' source name used by solcInput when the
// etherscan record carries no contract file name.
export const getMainContract = (contract: EtherscanContract, output) => {
  const fileName = contract.contractFileName
  if (fileName) {
    return clone(output.contracts[fileName][contract.contractName])
  }
  return clone(output.contracts.file[contract.contractName])
}
// Loads a solc compiler bundle from the local solc-bin directory and wraps it
// with solc.setupMethods to expose the standard compile() API. OVM builds are
// stored as `<version>.js` (see downloadSolc); EVM builds keep the
// `solc-emscripten-wasm32-<version>.js` file name.
export const getSolc = (version: string, ovm?: boolean) => {
  return solc.setupMethods(
    require(path.join(
      LOCAL_SOLC_DIR,
      ovm ? version : `solc-emscripten-wasm32-${version}.js`
    ))
  )
}
/**
 * Builds a solc standard-JSON input object for an etherscan contract.
 *
 * Etherscan's `sourceCode` field may be one of three things:
 * - a raw Solidity content string,
 * - a JSON `sources` object, or
 * - an entire solc standard-JSON input (sometimes double-wrapped in `{{ }}`).
 *
 * @param contract Etherscan contract entry to build the input for.
 * @returns A solc standard-JSON input object.
 */
export const solcInput = (contract: EtherscanContract) => {
  // Create a base solc input object
  const input = {
    language: 'Solidity',
    sources: {
      file: {
        content: contract.sourceCode,
      },
    },
    settings: {
      outputSelection: {
        '*': {
          '*': ['*'],
        },
      },
      optimizer: {
        enabled: contract.optimizationUsed === '1',
        runs: parseInt(contract.runs, 10),
      },
    },
  }
  try {
    let sourceCode = contract.sourceCode
    // Etherscan wraps full standard-JSON input in an extra pair of brackets;
    // trim the first and last characters so the remainder parses as JSON.
    // (startsWith replaces the deprecated String.prototype.substr call.)
    if (sourceCode.startsWith('{{')) {
      sourceCode = sourceCode.slice(1, -1)
    }
    // If the source code is valid JSON and has a `language` key, it is an
    // entire standard-JSON input — return it unchanged.
    const json = JSON.parse(sourceCode)
    if (json.language) {
      return json
    }
    // Otherwise it's just a `sources` object; splice it into the base input.
    input.sources = json
  } catch (e) {
    // Not JSON at all: treat sourceCode as raw content. The base input above
    // already carries it, so swallowing the parse error here is deliberate.
  }
  return input
}
// Cache of solc outputs keyed by the keccak hash of the full compiler input,
// so identical inputs are only compiled once.
const compilerCache: {
  [hash: string]: any
} = {}
/**
 * Compiles an etherscan contract with the appropriate solc version and
 * returns the compilation output for the contract's main file.
 *
 * @param opts.contract Etherscan contract entry to compile.
 * @param opts.ovm Whether to compile with the OVM fork of solc.
 * @returns solc output for the main contract.
 * @throws If the EVM solc version is unknown, compilation produces no
 * contracts, or the main contract cannot be located in the output.
 */
export const compile = (opts: {
  contract: EtherscanContract
  ovm: boolean
}): any => {
  let version: string
  if (opts.ovm) {
    version = opts.contract.compilerVersion
  } else {
    // Map the OVM compiler version to its corresponding EVM solc release.
    version = COMPILER_VERSIONS_TO_SOLC[opts.contract.compilerVersion]
    if (!version) {
      throw new Error(
        `Unable to find solc version ${opts.contract.compilerVersion}`
      )
    }
  }
  const solcInstance = getSolc(version, opts.ovm)
  const input = JSON.stringify(solcInput(opts.contract))
  const inputHash = ethers.utils.solidityKeccak256(['string'], [input])
  // Cache the compiler output to speed up repeated compilations of the same contract. If this
  // cache is too memory intensive, then we could consider only caching if the contract has been
  // seen more than once.
  let output: any
  if (compilerCache[inputHash]) {
    output = compilerCache[inputHash]
  } else {
    output = JSON.parse(solcInstance.compile(input))
    compilerCache[inputHash] = output
  }
  if (!output.contracts) {
    throw new Error(`Cannot compile ${opts.contract.contractAddress}`)
  }
  const mainOutput = getMainContract(opts.contract, output)
  if (!mainOutput) {
    throw new Error(
      `Contract filename mismatch: ${opts.contract.contractAddress}`
    )
  }
  return mainOutput
}
import { ethers } from 'ethers'
import fs from 'fs'
import { add0x, remove0x, clone } from '@eth-optimism/core-utils'
import { StateDump, SurgeryDataSources, AccountType } from './types'
import { findAccount } from './utils'
import { handlers } from './handlers'
import { classify } from './classifiers'
import { loadSurgeryData } from './data'
/**
 * Runs the surgery handlers over every account in the state dump (plus any
 * genesis-only accounts) and returns the cleaned-up output dump.
 *
 * @param data All surgery data sources (state dump, genesis, pools, etc.).
 * @returns The post-surgery state dump, standardized for Geth consumption.
 */
const doGenesisSurgery = async (
  data: SurgeryDataSources
): Promise<StateDump> => {
  // We'll generate the final genesis file from this output.
  const output: StateDump = []
  // Handle each account in the state dump.
  const input = data.dump.slice(data.configs.startIndex, data.configs.endIndex)
  // Insert any accounts in the genesis that aren't already in the state dump.
  for (const account of data.genesis) {
    if (findAccount(input, account.address) === undefined) {
      input.push(account)
    }
  }
  for (const [i, account] of input.entries()) {
    const accountType = classify(account, data)
    console.log(
      `[${i}/${input.length}] ${AccountType[accountType]}: ${account.address}`
    )
    const handler = handlers[accountType]
    // Handlers receive a shallow copy; returning undefined drops the account.
    const newAccount = await handler(clone(account), data)
    if (newAccount !== undefined) {
      output.push(newAccount)
    }
  }
  // Clean up and standardize the dump. Also performs a few tricks to reduce the overall size of
  // the state dump, which reduces bandwidth requirements.
  console.log('Cleaning up and standardizing dump format...')
  for (const account of output) {
    for (const [key, val] of Object.entries(account)) {
      // We want to be left with the following fields:
      // - balance
      // - nonce
      // - code
      // - storage (if necessary)
      if (key === 'storage') {
        if (Object.keys(account[key]).length === 0) {
          // We don't need storage if there are no storage values.
          delete account[key]
        } else {
          // We can remove 0x from storage keys and vals to save space.
          for (const [storageKey, storageVal] of Object.entries(account[key])) {
            delete account.storage[storageKey]
            account.storage[remove0x(storageKey)] = remove0x(storageVal)
          }
        }
      } else if (key === 'code') {
        // Code MUST start with 0x.
        account[key] = add0x(val)
      } else if (key === 'codeHash' || key === 'root') {
        // Neither of these fields are necessary. Geth will automatically generate them from the
        // code and storage.
        delete account[key]
      } else if (key === 'balance' || key === 'nonce') {
        // At this point we know that the input is either a string or a number. If it's a number,
        // we want to convert it into a string.
        let stripped = typeof val === 'number' ? val.toString(16) : val
        // Remove 0x so we can strip any leading zeros.
        stripped = remove0x(stripped)
        // We can further reduce our genesis size by removing leading zeros. We can even go as far
        // as removing the entire string because Geth appears to treat the empty string as 0.
        // (A spurious argument-less .replace() that preceded this call was a
        // no-op and has been removed.)
        stripped = stripped.replace(/^0+/, '')
        // We have to add 0x if the value is greater than or equal to 10 because Geth will throw an
        // error otherwise.
        if (stripped !== '' && ethers.BigNumber.from(add0x(stripped)).gte(10)) {
          stripped = add0x(stripped)
        }
        account[key] = stripped
      } else if (key === 'address') {
        // Keep the address as-is, we'll delete it eventually.
      } else {
        throw new Error(`unexpected account field: ${key}`)
      }
    }
  }
  return output
}
/**
 * Entrypoint for the state surgery script. Loads the surgery data, runs the
 * surgery process over every account, converts the result into the genesis
 * format that Geth expects, and writes the final genesis file to disk.
 */
const main = async () => {
  // Load the surgery data.
  const data = await loadSurgeryData()

  // Do the surgery process and get the new genesis dump.
  console.log('Starting surgery process...')
  const finalGenesisDump = await doGenesisSurgery(data)

  // Convert to the format that Geth expects: a map keyed by 0x-less address
  // rather than a flat list of accounts carrying their own address field.
  console.log('Converting dump to final format...')
  const finalGenesisAlloc = {}
  for (const account of finalGenesisDump) {
    const address = account.address
    delete account.address
    finalGenesisAlloc[remove0x(address)] = account
  }

  // Attach all of the original genesis configuration values.
  // NOTE(review): data.genesis is typed as a StateDump (an array), so this
  // spread yields numeric keys rather than genesis config fields such as
  // chainId/difficulty -- confirm this is the intended config source.
  const finalGenesis = {
    ...data.genesis,
    alloc: finalGenesisAlloc,
  }

  // Write the final genesis file to disk.
  console.log('Writing final genesis to disk...')
  fs.writeFileSync(
    data.configs.outputFilePath,
    JSON.stringify(finalGenesis, null, 2)
  )

  console.log('All done!')
}

main()
import { ethers } from 'ethers'
// Configuration for the surgery process, loaded from environment variables
// by loadConfigs().
export interface SurgeryConfigs {
  // Path to the pre-regenesis state dump file.
  stateDumpFilePath: string
  // Path to the dump of verified contracts exported from Etherscan.
  etherscanFilePath: string
  // Path to the base genesis file to layer the surgered state onto.
  genesisFilePath: string
  // Path where the final genesis file will be written.
  outputFilePath: string
  // Which L2 network is being operated on ('mainnet' or 'kovan').
  l2NetworkName: SupportedNetworks
  // JSON-RPC endpoints used during the surgery process.
  l2ProviderUrl: string
  l1ProviderUrl: string
  ropstenProviderUrl: string
  // Private key for the Ropsten wallet used during surgery.
  ropstenPrivateKey: string
  ethProviderUrl: string
  // Block height at which the state dump was taken.
  stateDumpHeight: number
  // Range of dump indices to process; endIndex defaults to Infinity.
  startIndex: number
  endIndex: number
}
// A single account entry in a state dump.
export interface Account {
  // Hex address of the account.
  address: string
  // Account nonce; may be a number or a string depending on the source.
  nonce: number | string
  // Account balance encoded as a string.
  balance: string
  // Hash of the account's code, if present in the dump.
  codeHash?: string
  // Storage trie root, if present in the dump.
  root?: string
  // Contract code, if any.
  code?: string
  // Storage slots as key/value hex strings, if any.
  storage?: {
    [key: string]: string
  }
}

// A state dump is simply a list of accounts.
export type StateDump = Account[]

// State dump in the map format produced by geth: address -> account fields.
export interface GethStateDump {
  [address: string]: {
    nonce: number
    balance: string
    codeHash: string
    root: string
    code?: string
    storage?: {
      [key: string]: string
    }
  }
}
// Classification assigned to every account in the input state dump. Each type
// is processed by a dedicated surgery handler.
export enum AccountType {
  // The 1inch deployer account, handled specially.
  ONEINCH_DEPLOYER,
  // Accounts that should be removed entirely from the new state.
  DELETE,
  // Externally owned accounts.
  EOA,
  // Precompile addresses.
  PRECOMPILE,
  // Predeploys that are new in the base genesis and are not the ETH predeploy.
  PREDEPLOY_NEW_NOT_ETH,
  // Predeploys whose old state should be wiped.
  PREDEPLOY_WIPE,
  // Predeploys whose old state should be preserved.
  PREDEPLOY_NO_WIPE,
  // The OVM ETH predeploy.
  PREDEPLOY_ETH,
  // The WETH predeploy.
  PREDEPLOY_WETH,
  // The Uniswap V3 factory contract.
  UNISWAP_V3_FACTORY,
  // The Uniswap V3 NFPM contract.
  UNISWAP_V3_NFPM,
  // Individual Uniswap V3 pool contracts.
  UNISWAP_V3_POOL,
  // Other Uniswap V3 contracts.
  UNISWAP_V3_OTHER,
  // Contracts without verified source code.
  UNVERIFIED,
  // Contracts with verified source code.
  VERIFIED,
}
// Metadata for a Uniswap V3 pool whose address changes during surgery.
export interface UniswapPoolData {
  // Pool address on the old system.
  oldAddress: string
  // Pool address on the new system.
  newAddress: string
  // Addresses of the pool's token pair.
  token0: string
  token1: string
  // Pool fee tier.
  fee: ethers.BigNumber
}

// A verified contract record as exported from Etherscan. Note that Etherscan
// encodes several numeric/boolean fields as strings.
export interface EtherscanContract {
  contractAddress: string
  // Deployed bytecode.
  code: string
  hash: string
  // Verified source code.
  sourceCode: string
  creationCode: string
  contractFileName: string
  contractName: string
  compilerVersion: string
  optimizationUsed: string
  runs: string
  constructorArguments: string
  // Linked library information, if any.
  library: string
}

export type EtherscanDump = EtherscanContract[]

// Networks the surgery script supports.
export type SupportedNetworks = 'mainnet' | 'kovan'
// Everything the surgery process needs as input, loaded up front by
// loadSurgeryData().
export interface SurgeryDataSources {
  // Parsed environment configuration.
  configs: SurgeryConfigs
  // Pre-regenesis state dump.
  dump: StateDump
  // Accounts from the base genesis file, in state dump form.
  genesis: StateDump
  // Uniswap V3 pools with their old and new addresses.
  pools: UniswapPoolData[]
  // Cache mapping hashes to the pool they belong to.
  poolHashCache: PoolHashCache
  // Verified contracts exported from Etherscan.
  etherscanDump: EtherscanContract[]
  // Providers (and one wallet) for the networks touched during surgery.
  ropstenProvider: ethers.providers.JsonRpcProvider
  ropstenWallet: ethers.Wallet
  l1Provider: ethers.providers.JsonRpcProvider
  l2Provider: ethers.providers.JsonRpcProvider
  ethProvider: ethers.providers.JsonRpcProvider
}

// Shape of a geth genesis file.
export interface GenesisFile {
  config: {
    chainId: number
    homesteadBlock: number
    eip150Block: number
    eip155Block: number
    eip158Block: number
    byzantiumBlock: number
    constantinopleBlock: number
    petersburgBlock: number
    istanbulBlock: number
    muirGlacierBlock: number
    clique: {
      period: number
      epoch: number
    }
  }
  difficulty: string
  gasLimit: string
  extraData: string
  alloc: GethStateDump
}

// A reference to an immutable value embedded in deployed bytecode.
export interface ImmutableReference {
  // Byte offset of the immutable value within the bytecode.
  start: number
  // Length of the immutable value in bytes.
  length: number
}

// Maps immutable identifiers to their locations in the bytecode.
export interface ImmutableReferences {
  [key: string]: ImmutableReference[]
}

// Cache entry linking a hash to a Uniswap pool and its index in the dump.
export interface PoolHashCache {
  [key: string]: {
    pool: UniswapPoolData
    index: number
  }
}
/* eslint @typescript-eslint/no-var-requires: "off" */
import { ethers } from 'ethers'
import { abi as UNISWAP_FACTORY_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Factory.sol/UniswapV3Factory.json'
import { parseChunked } from '@discoveryjs/json-ext'
import { createReadStream } from 'fs'
import * as fs from 'fs'
import byline from 'byline'
import * as dotenv from 'dotenv'
import * as assert from 'assert'
import { reqenv, getenv } from '@eth-optimism/core-utils'
import {
Account,
EtherscanContract,
StateDump,
SurgeryConfigs,
GenesisFile,
SupportedNetworks,
} from './types'
import { UNISWAP_V3_FACTORY_ADDRESS } from './constants'
/**
 * Searches a state dump for the account with the given address.
 *
 * @param dump State dump to search through.
 * @param address Address to look for (case-insensitive hex comparison).
 * @returns The matching account, or undefined if none matches.
 */
export const findAccount = (dump: StateDump, address: string): Account => {
  for (const candidate of dump) {
    if (hexStringEqual(candidate.address, address)) {
      return candidate
    }
  }
  return undefined
}
/**
 * Checks whether hex string `a` contains hex string `b`, ignoring case and
 * the leading '0x' prefix on both inputs.
 *
 * @param a Hex string to search within.
 * @param b Hex string to search for.
 * @returns True if `a` includes `b`.
 * @throws If either input is not a valid hex string.
 */
export const hexStringIncludes = (a: string, b: string): boolean => {
  for (const input of [a, b]) {
    if (!ethers.utils.isHexString(input)) {
      throw new Error(`not a hex string: ${input}`)
    }
  }
  const haystack = a.slice(2).toLowerCase()
  const needle = b.slice(2).toLowerCase()
  return haystack.includes(needle)
}
/**
 * Checks whether two hex strings are equal, ignoring case.
 *
 * @param a First hex string.
 * @param b Second hex string.
 * @returns True if the strings are equal modulo case.
 * @throws If either input is not a valid hex string.
 */
export const hexStringEqual = (a: string, b: string): boolean => {
  for (const input of [a, b]) {
    if (!ethers.utils.isHexString(input)) {
      throw new Error(`not a hex string: ${input}`)
    }
  }
  return a.toLowerCase() === b.toLowerCase()
}
/**
 * Left-pads a hex string with zeroes to 32 bytes.
 *
 * @param val Value to hex pad to 32 bytes.
 * @returns Value padded to 32 bytes.
 */
export const toHex32 = (val: string | number | ethers.BigNumber) => {
  const asHex = ethers.BigNumber.from(val).toHexString()
  return ethers.utils.hexZeroPad(asHex, 32)
}
/**
 * Moves the value at one storage slot of an account to another slot, deleting
 * the old slot. Optionally overwrites the value in the process.
 *
 * @param opts.account Account to modify (its storage is mutated in place).
 * @param opts.oldSlot Slot to move from. Numbers are hex-padded to 32 bytes.
 * @param opts.newSlot Slot to move to. Numbers are hex-padded to 32 bytes.
 * @param opts.newValue Optional replacement value for the new slot. A leading
 * '0x' is stripped to match the dump's storage formatting.
 * @throws If the account has no storage or the old slot is missing.
 */
export const transferStorageSlot = (opts: {
  account: Account
  oldSlot: string | number
  newSlot: string | number
  newValue?: string
}): void => {
  if (opts.account.storage === undefined) {
    throw new Error(`account has no storage: ${opts.account.address}`)
  }

  // BUGFIX: work on local copies instead of writing the normalized values back
  // into opts -- previously the caller's opts object was mutated as a side
  // effect of this call.
  const oldSlot =
    typeof opts.oldSlot === 'string' ? opts.oldSlot : toHex32(opts.oldSlot)
  const newSlot =
    typeof opts.newSlot === 'string' ? opts.newSlot : toHex32(opts.newSlot)

  const oldSlotVal = opts.account.storage[oldSlot]
  if (oldSlotVal === undefined) {
    throw new Error(
      `old slot not found in state dump, address=${opts.account.address}, slot=${oldSlot}`
    )
  }

  if (opts.newValue === undefined) {
    opts.account.storage[newSlot] = oldSlotVal
  } else {
    // Storage values in the dump are stored without the '0x' prefix.
    const newValue = opts.newValue.startsWith('0x')
      ? opts.newValue.slice(2)
      : opts.newValue
    opts.account.storage[newSlot] = newValue
  }

  delete opts.account.storage[oldSlot]
}
/**
 * Computes the storage slot for a (possibly nested) Solidity mapping entry:
 * the slot for key k of a mapping at base slot p is keccak256(k . p), applied
 * once per level of nesting.
 *
 * @param keys Mapping keys, outermost first. Must contain at least one key.
 * @param slot Base storage slot of the mapping.
 * @returns Hashed storage slot for the given keys.
 * @throws If no keys are provided.
 */
export const getMappingKey = (keys: any[], slot: number) => {
  // BUGFIX: resolves the old "TODO: assert keys.length > 0" -- fail loudly
  // instead of hashing an undefined key.
  if (keys.length === 0) {
    throw new Error('getMappingKey requires at least one key')
  }

  let key = ethers.utils.keccak256(
    ethers.utils.hexConcat([toHex32(keys[0]), toHex32(slot)])
  )
  // Each additional key hashes the previous result (the loop condition already
  // covers the single-key case; no separate length check needed).
  for (let i = 1; i < keys.length; i++) {
    key = ethers.utils.keccak256(
      ethers.utils.hexConcat([toHex32(keys[i]), key])
    )
  }
  return key
}
/**
 * Builds an ethers Contract instance pointing at the Uniswap V3 factory.
 *
 * @param signerOrProvider Signer or provider to attach to the contract.
 * @returns Contract instance for the Uniswap V3 factory.
 */
export const getUniswapV3Factory = (signerOrProvider: any): ethers.Contract => {
  const factory = new ethers.Contract(
    UNISWAP_V3_FACTORY_ADDRESS,
    UNISWAP_FACTORY_ABI,
    signerOrProvider
  )
  return factory
}
/**
 * Loads and validates the surgery configuration from environment variables,
 * optionally sourced from a .env file.
 *
 * @returns Parsed surgery configuration.
 * @throws If a required environment variable is missing or invalid.
 */
export const loadConfigs = (): SurgeryConfigs => {
  dotenv.config()
  const stateDumpFilePath = reqenv('REGEN__STATE_DUMP_FILE')
  const etherscanFilePath = reqenv('REGEN__ETHERSCAN_FILE')
  const genesisFilePath = reqenv('REGEN__GENESIS_FILE')
  const outputFilePath = reqenv('REGEN__OUTPUT_FILE')
  const l2NetworkName = reqenv('REGEN__L2_NETWORK_NAME')
  const l2ProviderUrl = reqenv('REGEN__L2_PROVIDER_URL')
  const l1ProviderUrl = reqenv('REGEN__L1_PROVIDER_URL')
  const ropstenProviderUrl = reqenv('REGEN__ROPSTEN_PROVIDER_URL')
  const ropstenPrivateKey = reqenv('REGEN__ROPSTEN_PRIVATE_KEY')
  const ethProviderUrl = reqenv('REGEN__ETH_PROVIDER_URL')
  const stateDumpHeight = parseInt(reqenv('REGEN__STATE_DUMP_HEIGHT'), 10)
  const startIndex = parseInt(getenv('REGEN__START_INDEX', '0'), 10)
  // BUGFIX: previously read REGEN__START_INDEX a second time, so the end
  // index could never be configured independently. An unset or zero value
  // still defaults to Infinity (process the entire dump).
  const endIndex = parseInt(getenv('REGEN__END_INDEX', '0'), 10) || Infinity

  // Input assertions
  assert.ok(
    ['mainnet', 'kovan'].includes(l2NetworkName),
    `L2_NETWORK_NAME must be one of "mainnet" or "kovan"`
  )

  return {
    stateDumpFilePath,
    etherscanFilePath,
    genesisFilePath,
    outputFilePath,
    l2NetworkName: l2NetworkName as SupportedNetworks,
    l2ProviderUrl,
    l1ProviderUrl,
    ropstenProviderUrl,
    ropstenPrivateKey,
    ethProviderUrl,
    stateDumpHeight,
    startIndex,
    endIndex,
  }
}
/**
 * Reads the state dump file into an object. Required because the dumps get quite large.
 * JavaScript throws an error when trying to load large JSON files (>512mb) directly via
 * fs.readFileSync. Need a streaming approach instead.
 *
 * @param dumppath Path to the state dump file.
 * @returns Parsed state dump object.
 */
export const readDumpFile = async (dumppath: string): Promise<StateDump> => {
  return new Promise<StateDump>((resolve, reject) => {
    const dump: StateDump = []
    const stream = byline(fs.createReadStream(dumppath, { encoding: 'utf8' }))
    // The first row of the dump is metadata rather than an account; skip it.
    let isFirstRow = true
    stream.on('data', (line: any) => {
      try {
        const account = JSON.parse(line)
        if (isFirstRow) {
          isFirstRow = false
        } else {
          delete account.key
          dump.push(account)
        }
      } catch (err) {
        // BUGFIX: a malformed line used to throw inside this handler, leaving
        // the returned promise pending forever. Reject instead.
        reject(err)
      }
    })
    // BUGFIX: stream errors (e.g. a missing file) were previously unhandled,
    // which also left the promise pending forever.
    stream.on('error', reject)
    stream.on('end', () => {
      resolve(dump)
    })
  })
}
/**
 * Streams and parses the Etherscan contract dump, which may be too large for
 * a single JSON.parse call.
 *
 * @param etherscanpath Path to the Etherscan dump file.
 * @returns Parsed list of Etherscan contract records.
 */
export const readEtherscanFile = async (
  etherscanpath: string
): Promise<EtherscanContract[]> => {
  const stream = createReadStream(etherscanpath)
  return parseChunked(stream)
}
/**
 * Reads and parses a genesis file from disk.
 *
 * @param genesispath Path to the genesis JSON file.
 * @returns Parsed genesis file contents.
 */
export const readGenesisFile = async (
  genesispath: string
): Promise<GenesisFile> => {
  const raw = fs.readFileSync(genesispath, 'utf8')
  return JSON.parse(raw)
}
/**
 * Reads a genesis file and flattens its alloc map into a state dump, copying
 * each account's address into its entry.
 *
 * @param genesispath Path to the genesis JSON file.
 * @returns State dump derived from the genesis alloc.
 */
export const readGenesisStateDump = async (
  genesispath: string
): Promise<StateDump> => {
  const genesis = await readGenesisFile(genesispath)
  return Object.entries(genesis.alloc).map(([address, account]) => {
    return {
      address,
      ...account,
    }
  })
}
export const checkStateDump = (dump: StateDump) => {
for (const account of dump) {
assert.equal(
account.address.toLowerCase(),
account.address,
`unexpected upper case character in state dump address: ${account.address}`
)
assert.ok(
typeof account.nonce === 'number',
`nonce is not a number: ${account.nonce}`
)
if (account.codeHash) {
assert.equal(
account.codeHash.toLowerCase(),
account.codeHash,
`unexpected upper case character in state dump codeHash: ${account.codeHash}`
)
}
if (account.root) {
assert.equal(
account.root.toLowerCase(),
account.root,
`unexpected upper case character in state dump root: ${account.root}`
)
}
if (account.code) {
assert.equal(
account.code.toLowerCase(),
account.code,
`unexpected upper case character in state dump code: ${account.code}`
)
}
// All accounts other than precompiles should have a balance of zero.
if (
!account.address.startsWith('0x00000000000000000000000000000000000000')
) {
assert.equal(
account.balance,
'0',
`unexpected non-zero balance in state dump address: ${account.address}`
)
}
if (account.storage !== undefined) {
for (const [storageKey, storageVal] of Object.entries(account.storage)) {
assert.equal(
storageKey.toLowerCase(),
storageKey,
`unexpected upper case character in state dump storage key: ${storageKey}`
)
assert.equal(
storageVal.toLowerCase(),
storageVal,
`unexpected upper case character in state dump storage value: ${storageVal}`
)
}
}
}
}
import { env } from './setup'
// Mocha root-level hook: ensures the shared test environment has loaded the
// (large) surgery data files from disk before any test suite runs.
before('initializing test environment', async () => {
  await env.init()
})
import { HardhatUserConfig } from 'hardhat/config'

// Hardhat configuration used to run the surgery test suites.
const config: HardhatUserConfig = {
  // All paths relative to ** this file **.
  paths: {
    tests: '../../test',
    cache: '../temp/cache',
    artifacts: '../temp/artifacts',
  },
  mocha: {
    // Tests issue many live JSON-RPC calls, so allow long runtimes.
    timeout: 100000,
  },
}

export default config
import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util'
import { add0x } from '@eth-optimism/core-utils'
import { ethers } from 'ethers'
import { expect, env } from '../setup'
import { AccountType } from '../../scripts/types'
// Checks that accounts classified as DELETE were fully removed from the
// post-regenesis state: no code, null code/storage hashes, and zero balance
// and nonce.
describe('deleted contracts', () => {
  before(async () => {
    // Tests are generated dynamically inside this before() hook because the
    // classified account list only exists after async env initialization.
    const accs = env.getAccountsByType(AccountType.DELETE)
    for (const [i, acc] of accs.entries()) {
      describe(`account ${i}/${accs.length} (${acc.address})`, () => {
        it('should not have any code', async () => {
          const code = await env.postL2Provider.getCode(acc.address)
          expect(code).to.eq('0x')
        })
        it('should have the null code hash and storage root', async () => {
          // eth_getProof exposes the raw account fields (codeHash/storageHash)
          // that the standard provider methods do not.
          const proof = await env.postL2Provider.send('eth_getProof', [
            acc.address,
            [],
            'latest',
          ])
          expect(proof.codeHash).to.equal(add0x(KECCAK256_NULL_S))
          expect(proof.storageHash).to.equal(add0x(KECCAK256_RLP_S))
        })
        it('should have a balance equal to zero', async () => {
          // Balance after can come from the latest block.
          const balance = await env.postL2Provider.getBalance(acc.address)
          expect(balance).to.deep.eq(ethers.BigNumber.from(0))
        })
        it('should have a nonce equal to zero', async () => {
          // Nonce after can come from the latest block.
          const nonce = await env.postL2Provider.getTransactionCount(
            acc.address
          )
          expect(nonce).to.deep.eq(0)
        })
      })
    }
  })

  // Hack for dynamically generating tests based on async data.
  // eslint-disable-next-line @typescript-eslint/no-empty-function
  it('stub', async () => {})
})
import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util'
import { add0x } from '@eth-optimism/core-utils'
import { expect, env, NUM_ACCOUNTS_DIVISOR } from '../setup'
import { AccountType, Account } from '../../scripts/types'
// Checks that EOAs carried over cleanly: no code, null hashes, and unchanged
// balances/nonces relative to the pre-regenesis state at the dump height.
describe('EOAs', () => {
  describe('standard EOA', () => {
    before(async () => {
      // Tests are generated dynamically inside this before() hook because the
      // classified account list only exists after async env initialization.
      // Only every NUM_ACCOUNTS_DIVISOR-th EOA is checked to keep runtime sane.
      const eoas = env.getAccountsByType(AccountType.EOA)
      for (const [i, eoa] of eoas.entries()) {
        if (i % NUM_ACCOUNTS_DIVISOR === 0) {
          describe(`account ${i}/${eoas.length} (${eoa.address})`, () => {
            it('should not have any code', async () => {
              const code = await env.postL2Provider.getCode(eoa.address)
              expect(code).to.eq('0x')
            })
            it('should have the null code hash and storage root', async () => {
              const proof = await env.postL2Provider.send('eth_getProof', [
                eoa.address,
                [],
                'latest',
              ])
              expect(proof.codeHash).to.equal(add0x(KECCAK256_NULL_S))
              expect(proof.storageHash).to.equal(add0x(KECCAK256_RLP_S))
            })
            it('should have the same balance as it had before', async () => {
              // Balance before needs to come from the specific block at which the dump was taken.
              const preBalance = await env.preL2Provider.getBalance(
                eoa.address,
                env.config.stateDumpHeight
              )
              // Balance after can come from the latest block.
              const postBalance = await env.postL2Provider.getBalance(
                eoa.address
              )
              expect(preBalance).to.deep.eq(postBalance)
            })
            it('should have the same nonce as it had before', async () => {
              // Nonce before needs to come from the specific block at which the dump was taken.
              const preNonce = await env.preL2Provider.getTransactionCount(
                eoa.address,
                env.config.stateDumpHeight
              )
              // Nonce after can come from the latest block.
              const postNonce = await env.postL2Provider.getTransactionCount(
                eoa.address
              )
              expect(preNonce).to.deep.eq(postNonce)
            })
          })
        }
      }
    })

    // Hack for dynamically generating tests based on async data.
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    it('stub', async () => {})
  })

  // Does not exist on Kovan?
  describe.skip('1inch deployer', () => {
    let eoa: Account
    before(() => {
      eoa = env.getAccountsByType(AccountType.ONEINCH_DEPLOYER)[0]
    })
    it('should not have any code', async () => {
      const code = await env.postL2Provider.getCode(eoa.address)
      expect(code).to.eq('0x')
    })
    it('should have the null code hash and storage root', async () => {
      const proof = await env.postL2Provider.send('eth_getProof', [
        eoa.address,
        [],
        'latest',
      ])
      expect(proof.codeHash).to.equal(add0x(KECCAK256_NULL_S))
      expect(proof.storageHash).to.equal(add0x(KECCAK256_RLP_S))
    })
    it('should have the same balance as it had before', async () => {
      // Balance before needs to come from the specific block at which the dump was taken.
      const preBalance = await env.preL2Provider.getBalance(
        eoa.address,
        env.config.stateDumpHeight
      )
      // Balance after can come from the latest block.
      const postBalance = await env.postL2Provider.getBalance(eoa.address)
      expect(preBalance).to.deep.eq(postBalance)
    })
    // NOTE(review): despite the title, this asserts the nonce is *unchanged*,
    // not zero -- confirm which behavior is intended.
    it('should have a nonce equal to zero', async () => {
      // Nonce before needs to come from the specific block at which the dump was taken.
      const preNonce = await env.preL2Provider.getTransactionCount(
        eoa.address,
        env.config.stateDumpHeight
      )
      // Nonce after can come from the latest block.
      const postNonce = await env.postL2Provider.getTransactionCount(
        eoa.address
      )
      expect(preNonce).to.deep.eq(postNonce)
    })
  })
})
/* eslint-disable @typescript-eslint/no-empty-function */
// Placeholder specs describing the intended predeploy surgery behavior.
// Skipped: the test bodies have not been implemented yet.
describe.skip('predeploys', () => {
  describe('new predeploys that are not ETH', () => {
    it('should have the exact state specified in the base genesis file', async () => {})
  })
  describe('predeploys where the old state should be wiped', () => {
    it('should have the code and storage of the base genesis file', async () => {})
    it('should have the same nonce and balance as before', async () => {})
  })
  describe('predeploys where the old state should be preserved', () => {
    it('should have the code of the base genesis file', async () => {})
    it('should have the combined storage of the old and new state', async () => {})
    it('should have the same nonce and balance as before', async () => {})
  })
  describe('OVM_ETH', () => {
    it('should have disabled ERC20 features', async () => {})
    it('should no recorded balance for the contracts that move to WETH9', async () => {})
    it('should have a new balance for WETH9 equal to the sum of the moved contract balances', async () => {})
  })
  describe('WETH9', () => {
    it('should have balances for each contract that should move', async () => {})
    it('should have a balance equal to the sum of all moved balances', async () => {})
  })
})
import { ethers } from 'ethers'
import { abi as UNISWAP_POOL_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Pool.sol/UniswapV3Pool.json'
import { UNISWAP_V3_NFPM_ADDRESS } from '../../scripts/constants'
import { getUniswapV3Factory } from '../../scripts/utils'
import { expect, env } from '../setup'
import { AccountType } from '../../scripts/types'
// Minimal ERC20 ABI: only balanceOf is needed for the pool balance checks.
const ERC20_ABI = ['function balanceOf(address owner) view returns (uint256)']

// Verifies the Uniswap V3 contracts after surgery: the factory keeps its
// settings and pool registry, pools exist at their recomputed addresses with
// identical code/storage/token balances, and other Uniswap contracts match
// the bytecode deployed on L1.
describe('uniswap contracts', () => {
  describe('V3 factory', () => {
    let preUniswapV3Factory: ethers.Contract
    let postUniswapV3Factory: ethers.Contract
    before(async () => {
      preUniswapV3Factory = getUniswapV3Factory(env.preL2Provider)
      postUniswapV3Factory = getUniswapV3Factory(env.postL2Provider)
    })
    it('should have the same owner', async () => {
      const preOwner = await preUniswapV3Factory.owner()
      const postOwner = await postUniswapV3Factory.owner()
      expect(preOwner).to.equal(postOwner)
    })
    it('should have the same feeAmountTickSpacing map values', async () => {
      // Check the three fee tiers used below.
      for (const fee of [500, 3000, 10000]) {
        const preValue = await preUniswapV3Factory.feeAmountTickSpacing(fee)
        const postValue = await postUniswapV3Factory.feeAmountTickSpacing(fee)
        expect(preValue).to.deep.equal(postValue)
      }
    })
    it('should have the right pool addresses', async () => {
      for (const pool of env.surgeryDataSources.pools) {
        // The registry must resolve the pool in both token orders, and to the
        // recomputed (new) address.
        const remotePoolAddress1 = await postUniswapV3Factory.getPool(
          pool.token0,
          pool.token1,
          pool.fee
        )
        const remotePoolAddress2 = await postUniswapV3Factory.getPool(
          pool.token1,
          pool.token0,
          pool.fee
        )
        expect(remotePoolAddress1).to.equal(remotePoolAddress2)
        expect(remotePoolAddress1.toLowerCase()).to.equal(
          pool.newAddress.toLowerCase()
        )
      }
    })
    it('should have the same code as on mainnet', async () => {
      const l2Code = await env.postL2Provider.getCode(
        postUniswapV3Factory.address
      )
      const l1Code = await env.surgeryDataSources.l1Provider.getCode(
        postUniswapV3Factory.address
      )
      expect(l2Code).to.not.equal('0x')
      expect(l2Code).to.equal(l1Code)
    })
  })
  describe('V3 NFPM', () => {
    it('should have the same code as on mainnet', async () => {
      const l2Code = await env.postL2Provider.getCode(UNISWAP_V3_NFPM_ADDRESS)
      const l1Code = await env.surgeryDataSources.l1Provider.getCode(
        UNISWAP_V3_NFPM_ADDRESS
      )
      expect(l2Code).to.not.equal('0x')
      expect(l2Code).to.equal(l1Code)
    })
    // TODO: what's the best way to test the _poolIds change?
  })
  describe('V3 pools', () => {
    before(async () => {
      // Tests are generated dynamically because the pool list is only
      // available after async env initialization.
      for (const pool of env.surgeryDataSources.pools) {
        describe(`pool at address ${pool.newAddress}`, () => {
          let prePoolContract: ethers.Contract
          let postPoolContract: ethers.Contract
          before(async () => {
            // Pre: old address on the pre-regenesis node; post: new address
            // on the post-regenesis node.
            prePoolContract = new ethers.Contract(
              pool.oldAddress,
              UNISWAP_POOL_ABI,
              env.preL2Provider
            )
            postPoolContract = new ethers.Contract(
              pool.newAddress,
              UNISWAP_POOL_ABI,
              env.postL2Provider
            )
          })
          it('should have the same code as on testnet', async () => {
            const l2Code = await env.postL2Provider.getCode(
              postPoolContract.address
            )
            const l1Code = await env.surgeryDataSources.l1Provider.getCode(
              postPoolContract.address
            )
            expect(l2Code).to.not.equal('0x')
            expect(l2Code).to.equal(l1Code)
          })
          it('should have the same storage values', async () => {
            // Public state variables exposed by the pool contract.
            const varsToCheck = [
              'slot0',
              'feeGrowthGlobal0X128',
              'feeGrowthGlobal1X128',
              'protocolFees',
              'liquidity',
              'factory',
              'token0',
              'token1',
              'fee',
              'tickSpacing',
              'maxLiquidityPerTick',
            ]
            for (const varName of varsToCheck) {
              // Pre value is read at the dump height; post from latest.
              const preValue = await prePoolContract[varName]({
                blockTag: env.config.stateDumpHeight,
              })
              const postValue = await postPoolContract[varName]()
              expect(preValue).to.deep.equal(postValue)
            }
          })
          it('should have the same token balances as before', async () => {
            // Template contract; attached to each token address below.
            const baseERC20 = new ethers.Contract(
              ethers.constants.AddressZero,
              ERC20_ABI
            )
            const preToken0 = baseERC20
              .attach(pool.token0)
              .connect(env.preL2Provider)
            const postToken0 = baseERC20
              .attach(pool.token0)
              .connect(env.postL2Provider)
            const preToken1 = baseERC20
              .attach(pool.token1)
              .connect(env.preL2Provider)
            const postToken1 = baseERC20
              .attach(pool.token1)
              .connect(env.postL2Provider)
            // Token0 might not have any code in the new system, we can skip this check if so.
            const newToken0Code = await env.postL2Provider.getCode(pool.token0)
            if (newToken0Code !== '0x') {
              const preBalance0 = await preToken0.balanceOf(pool.oldAddress, {
                blockTag: env.config.stateDumpHeight,
              })
              const postBalance0 = await postToken0.balanceOf(pool.newAddress)
              expect(preBalance0).to.deep.equal(postBalance0)
            }
            // Token1 might not have any code in the new system, we can skip this check if so.
            const newToken1Code = await env.postL2Provider.getCode(pool.token1)
            if (newToken1Code !== '0x') {
              const preBalance1 = await preToken1.balanceOf(pool.oldAddress, {
                blockTag: env.config.stateDumpHeight,
              })
              const postBalance1 = await postToken1.balanceOf(pool.newAddress)
              expect(preBalance1).to.deep.equal(postBalance1)
            }
          })
        })
      }
      // TODO: add a test for minting positions?
    })

    // Hack for dynamically generating tests based on async data.
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    it('stub', async () => {})
  })
  describe('other', () => {
    before(async () => {
      const accs = env.getAccountsByType(AccountType.UNISWAP_V3_OTHER)
      for (const acc of accs) {
        describe(`uniswap contract at address ${acc.address}`, () => {
          it('should have the same code as on mainnet', async () => {
            const l2Code = await env.postL2Provider.getCode(acc.address)
            const l1Code = await env.surgeryDataSources.l1Provider.getCode(
              acc.address
            )
            expect(l2Code).to.not.equal('0x')
            expect(l2Code).to.equal(l1Code)
          })
        })
      }
    })

    // Hack for dynamically generating tests based on async data.
    // eslint-disable-next-line @typescript-eslint/no-empty-function
    it('stub', async () => {})
  })
})
/* eslint-disable @typescript-eslint/no-empty-function */
import { expect, env, NUM_ACCOUNTS_DIVISOR } from '../setup'
import { AccountType, Account } from '../../scripts/types'
// Checks contracts that were recompiled during surgery (VERIFIED type): the
// new bytecode should be no larger than the old, and balances/nonces should
// be unchanged.
describe('verified', () => {
  before(async () => {
    // Tests are generated dynamically inside this before() hook because the
    // classified account list only exists after async env initialization.
    // Only every NUM_ACCOUNTS_DIVISOR-th account is checked for runtime.
    const verified = env.getAccountsByType(AccountType.VERIFIED)
    for (const [i, account] of verified.entries()) {
      if (i % NUM_ACCOUNTS_DIVISOR === 0) {
        const preBytecode = await env.preL2Provider.getCode(account.address)
        const postBytecode = await env.postL2Provider.getCode(account.address)
        describe(`account ${i}/${verified.length} (${account.address})`, () => {
          it('should have new bytecode with equal or smaller size', async () => {
            const preSize = preBytecode.length
            const postSize = postBytecode.length
            expect(preSize >= postSize).to.be.true
          })
          it('should have the same nonce and balance', async () => {
            // Pre values are read at the dump height; post from latest.
            const preNonce = await env.preL2Provider.getTransactionCount(
              account.address,
              env.config.stateDumpHeight
            )
            const postNonce = await env.postL2Provider.getTransactionCount(
              account.address
            )
            expect(preNonce).to.deep.eq(postNonce)

            const preBalance = await env.preL2Provider.getBalance(
              account.address,
              env.config.stateDumpHeight
            )
            const postBalance = await env.postL2Provider.getBalance(
              account.address
            )
            expect(preBalance).to.deep.eq(postBalance)
          })
        })
      }
    }
  })

  // Stub so mocha runs the dynamically generated tests above.
  it('stub', async () => {})
})
/* External Imports */
import chai = require('chai')
import Mocha from 'mocha'
import chaiAsPromised from 'chai-as-promised'
import * as dotenv from 'dotenv'
import { reqenv, getenv } from '@eth-optimism/core-utils'
import { providers } from 'ethers'
import { SurgeryDataSources, Account, AccountType } from '../scripts/types'
import { loadSurgeryData } from '../scripts/data'
import { classify } from '../scripts/classifiers'
// Chai plugins go here.
chai.use(chaiAsPromised)

const should = chai.should()
const expect = chai.expect

// Load environment variables from a .env file if one is present.
dotenv.config()

// Only check every Nth account of a given type to keep suite runtime sane.
export const NUM_ACCOUNTS_DIVISOR = 4096

// Configuration pulled from the environment for a test run.
interface TestEnvConfig {
  // URL of a pre-regenesis L2 node, synced to the state dump height.
  preL2ProviderUrl: string
  // URL of a post-regenesis L2 node initialized from the surgery output.
  postL2ProviderUrl: string
  // Block height the state dump was taken at, or 'latest' if unset/invalid.
  stateDumpHeight: string | number
}
// Builds the test environment configuration from environment variables.
// REGEN__STATE_DUMP_HEIGHT falls back to 'latest' when unset or non-numeric.
const config = (): TestEnvConfig => {
  const rawHeight = getenv('REGEN__STATE_DUMP_HEIGHT')
  const parsedHeight = parseInt(rawHeight, 10)
  return {
    preL2ProviderUrl: reqenv('REGEN__PRE_L2_PROVIDER_URL'),
    postL2ProviderUrl: reqenv('REGEN__POST_L2_PROVIDER_URL'),
    stateDumpHeight: parsedHeight || 'latest',
  }
}
// An account from the input dump tagged with its classified type.
interface TypedAccount extends Account {
  type: AccountType
}

// A TestEnv that contains all of the required test data
class TestEnv {
  // Config
  config: TestEnvConfig

  // An L2 provider configured to be able to query a pre
  // regenesis L2 node. This node should be synced to the
  // height that the state dump was taken
  preL2Provider: providers.StaticJsonRpcProvider

  // An L2 provider configured to be able to query a post
  // regenesis L2 node. This L2 node was initialized with
  // the results of the state surgery script
  postL2Provider: providers.StaticJsonRpcProvider

  // The datasources used for doing state surgery
  surgeryDataSources: SurgeryDataSources

  // List of typed accounts in the input dump
  accounts: TypedAccount[] = []

  constructor(opts: TestEnvConfig) {
    this.config = opts
    this.preL2Provider = new providers.StaticJsonRpcProvider(
      opts.preL2ProviderUrl
    )
    this.postL2Provider = new providers.StaticJsonRpcProvider(
      opts.postL2ProviderUrl
    )
  }

  // Read the big files from disk. Without bumping the size of the nodejs heap,
  // this can oom the process. Prefix the test command with:
  // $ NODE_OPTIONS=--max-old-space-size=8192
  async init() {
    // Only load once; this method is called from many before() hooks.
    if (this.surgeryDataSources === undefined) {
      this.surgeryDataSources = await loadSurgeryData()

      // Classify the accounts once, this takes a while so it's better to cache it.
      console.log(`Classifying accounts...`)
      for (const account of this.surgeryDataSources.dump) {
        const accountType = classify(account, this.surgeryDataSources)
        this.accounts.push({
          ...account,
          type: accountType,
        })
      }
    }
  }

  // Returns all dump accounts classified as the given type.
  getAccountsByType(type: AccountType) {
    return this.accounts.filter((account) => account.type === type)
  }
}
// Create a singleton test env that can be imported into each
// test file. It is important that the async operations are only
// called once as they take awhile. Each test file should be sure
// to call `env.init()` in a `before` clause to ensure that
// the files are read from disk at least once
let env: TestEnv
try {
  if (env === undefined) {
    const cfg = config()
    env = new TestEnv(cfg)
  }
} catch (e) {
  // Swallow config errors (e.g. missing env vars) so that merely importing
  // this module doesn't crash test discovery; `env` stays undefined here.
  console.error(`unable to initialize test env: ${e.toString()}`)
}

export { should, expect, Mocha, env }
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "resolveJsonModule": true
  }
}
......@@ -5,6 +5,7 @@
"sourceMap": true,
"esModuleInterop": true,
"composite": true,
"resolveJsonModule": true,
"declaration": true,
"noImplicitAny": false,
"removeComments": true,
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment