Commit 617efd58 authored by Matthew Slipper's avatar Matthew Slipper Committed by GitHub

Merge pull request #2193 from ethereum-optimism/develop

Develop -> Master
parents d10e3199 9e229c62
---
'@eth-optimism/sdk': patch
---
Adds support for depositing or withdrawing to a target address
---
'@eth-optimism/proxyd': minor
---
Handle nil responses better
---
'@eth-optimism/batch-submitter-service': patch
---
Switch num_elements_per_batch from Histogram to Summary
---
'@eth-optimism/core-utils': patch
---
Added tests and docstrings to misc functions
---
'@eth-optimism/batch-submitter-service': patch
---
Remove extra space in metric names
---
'@eth-optimism/integration-tests': patch
---
Remove Watcher usage from itests
---
'@eth-optimism/sdk': patch
---
Removes the getTokenBridgeMessagesByAddress function
---
'@eth-optimism/message-relayer': patch
---
Fixes a bug that prevented the relayer from running correctly.
---
'@eth-optimism/sdk': patch
---
This update implements the asL2Provider function
---
'@eth-optimism/batch-submitter-service': patch
---
Default to JSON logs, add LOG_TERMINAL flag for debugging
---
'@eth-optimism/batch-submitter-service': patch
---
Unify metric name format
---
'@eth-optimism/core-utils': patch
---
Refactor folder structure of @eth-optimism/core-utils.
---
'@eth-optimism/sdk': patch
---
Updates the SDK to include default bridges for the local Optimism network (31337)
---
'@eth-optimism/sdk': patch
---
Have SDK sort deposits/withdrawals descending by block number
---
'@eth-optimism/integration-tests': patch
---
Support non-well-known networks
---
'@eth-optimism/integration-tests': patch
---
Use new asL2Provider function for integration tests
---
'@eth-optimism/integration-tests': patch
---
Updates integration tests to use the SDK for bridged token tests
---
'@eth-optimism/integration-tests': patch
---
Skip an unreliable test
---
'@eth-optimism/batch-submitter-service': patch
---
Fixes a bug where clearing txs are rejected on startup due to missing gas limit
name: bss-core unit tests
on:
  push:
    paths:
      - 'go/bss-core/**'
    branches:
      - 'master'
      - 'develop'
      - '*rc'
      - 'regenesis/*'
  pull_request:
    paths:
      # Use '**' (not '*') so changes in nested packages under go/bss-core
      # also trigger this workflow, matching the push filter above.
      - 'go/bss-core/**'
  workflow_dispatch:
defaults:
  run:
    working-directory: './go/bss-core'
jobs:
  tests:
    runs-on: ubuntu-latest
    steps:
      - name: Install Go
        uses: actions/setup-go@v2
        with:
          go-version: 1.16.x
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Test
        run: go test -v ./...
......@@ -29,3 +29,8 @@ jobs:
with:
version: v1.29
working-directory: go/batch-submitter
- name: golangci-lint bss-core
uses: golangci/golangci-lint-action@v2
with:
version: v1.29
working-directory: go/bss-core
......@@ -13,4 +13,4 @@ jobs:
- name: Require-reviewers
uses: travelperk/label-requires-reviews-action@v0.1
env:
GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN}}
\ No newline at end of file
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
......@@ -54,9 +54,11 @@ func Main(gitVersion string) func(ctx *cli.Context) error {
return err
}
logHandler = bsscore.SentryStreamHandler(os.Stdout, log.TerminalFormat(true))
} else {
logHandler = bsscore.SentryStreamHandler(os.Stdout, log.JSONFormat())
} else if cfg.LogTerminal {
logHandler = log.StreamHandler(os.Stdout, log.TerminalFormat(true))
} else {
logHandler = log.StreamHandler(os.Stdout, log.JSONFormat())
}
logLevel, err := log.LvlFromString(cfg.LogLevel)
......
......@@ -118,6 +118,11 @@ type Config struct {
// LogLevel is the lowest log level that will be output.
LogLevel string
// LogTerminal if true, prints to stdout in terminal format, otherwise
// prints using JSON. If SentryEnable is true this flag is ignored, and logs
// are printed using JSON.
LogTerminal bool
// SentryEnable if true, logs any error messages to sentry. SentryDsn
// must also be set if SentryEnable is true.
SentryEnable bool
......@@ -191,6 +196,7 @@ func NewConfig(ctx *cli.Context) (Config, error) {
ClearPendingTxs: ctx.GlobalBool(flags.ClearPendingTxsFlag.Name),
/* Optional Flags */
LogLevel: ctx.GlobalString(flags.LogLevelFlag.Name),
LogTerminal: ctx.GlobalBool(flags.LogTerminalFlag.Name),
SentryEnable: ctx.GlobalBool(flags.SentryEnableFlag.Name),
SentryDsn: ctx.GlobalString(flags.SentryDsnFlag.Name),
SentryTraceRate: ctx.GlobalDuration(flags.SentryTraceRateFlag.Name),
......
......@@ -129,6 +129,13 @@ var (
Value: "info",
EnvVar: prefixEnvVar("LOG_LEVEL"),
}
LogTerminalFlag = cli.BoolFlag{
Name: "log-terminal",
Usage: "If true, outputs logs in terminal format, otherwise prints " +
"in JSON format. If SENTRY_ENABLE is set to true, this flag is " +
"ignored and logs are printed using JSON",
EnvVar: prefixEnvVar("LOG_TERMINAL"),
}
SentryEnableFlag = cli.BoolFlag{
Name: "sentry-enable",
Usage: "Whether or not to enable Sentry. If true, sentry-dsn must also be set",
......@@ -197,8 +204,8 @@ var (
EnvVar: prefixEnvVar("METRICS_PORT"),
}
HTTP2DisableFlag = cli.BoolFlag{
Name: "http2-disable",
Usage: "Whether or not to disable HTTP/2 support.",
Name: "http2-disable",
Usage: "Whether or not to disable HTTP/2 support.",
EnvVar: prefixEnvVar("HTTP2_DISABLE"),
}
)
......@@ -224,6 +231,7 @@ var requiredFlags = []cli.Flag{
var optionalFlags = []cli.Flag{
LogLevelFlag,
LogTerminalFlag,
SentryEnableFlag,
SentryDsnFlag,
SentryTraceRateFlag,
......
......@@ -140,36 +140,3 @@ func ParseWalletPrivKeyAndContractAddr(
return privKey, contractAddress, nil
}
// parseWalletPrivKeyAndContractAddr derives the signing key and target
// contract address for a particular sub-service. The key is sourced either
// from an explicit private-key string or from a BIP39 mnemonic combined with
// a BIP32 HD derivation path.
func parseWalletPrivKeyAndContractAddr(
	name, mnemonic, hdPath, privKeyStr, contractAddrStr string,
) (*ecdsa.PrivateKey, common.Address, error) {

	// Resolve the private key from whichever source was configured.
	privKey, err := GetConfiguredPrivateKey(mnemonic, hdPath, privKeyStr)
	if err != nil {
		return nil, common.Address{}, err
	}

	// Resolve the contract the wallet will be sending to.
	contractAddress, err := ParseAddress(contractAddrStr)
	if err != nil {
		return nil, common.Address{}, err
	}

	// Derive the public wallet address for logging purposes; the private key
	// itself is deliberately never logged.
	walletAddress := crypto.PubkeyToAddress(privKey.PublicKey)
	log.Info(name+" wallet params parsed successfully", "wallet_address",
		walletAddress, "contract_address", contractAddress)

	return privKey, contractAddress, nil
}
......@@ -8,6 +8,7 @@ import (
"strings"
"github.com/ethereum-optimism/optimism/go/bss-core/txmgr"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
......@@ -159,7 +160,20 @@ func SignClearingTx(
}
gasFeeCap := txmgr.CalcGasFeeCap(head.BaseFee, gasTipCap)
tx := CraftClearingTx(walletAddr, nonce, gasFeeCap, gasTipCap)
gasLimit, err := l1Client.EstimateGas(ctx, ethereum.CallMsg{
From: walletAddr,
To: &walletAddr,
GasFeeCap: gasFeeCap,
GasTipCap: gasTipCap,
Value: nil,
Data: nil,
})
if err != nil {
return nil, err
}
tx := CraftClearingTx(walletAddr, nonce, gasFeeCap, gasTipCap, gasLimit)
return types.SignTx(
tx, types.LatestSignerForChainID(chainID), privKey,
......@@ -173,11 +187,13 @@ func CraftClearingTx(
nonce uint64,
gasFeeCap *big.Int,
gasTipCap *big.Int,
gasLimit uint64,
) *types.Transaction {
return types.NewTx(&types.DynamicFeeTx{
To: &walletAddr,
Nonce: nonce,
Gas: gasLimit,
GasFeeCap: gasFeeCap,
GasTipCap: gasTipCap,
Value: nil,
......
......@@ -8,9 +8,10 @@ import (
"testing"
"time"
"github.com/ethereum-optimism/optimism/go/batch-submitter/drivers"
"github.com/ethereum-optimism/optimism/go/bss-core/drivers"
"github.com/ethereum-optimism/optimism/go/bss-core/mock"
"github.com/ethereum-optimism/optimism/go/bss-core/txmgr"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/types"
......@@ -36,16 +37,18 @@ var (
testGasTipCap = big.NewInt(4)
testBlockNumber = uint64(5)
testBaseFee = big.NewInt(6)
testGasLimit = uint64(7)
)
// TestCraftClearingTx asserts that CraftClearingTx produces the expected
// unsigned clearing transaction.
func TestCraftClearingTx(t *testing.T) {
tx := drivers.CraftClearingTx(
testWalletAddr, testNonce, testGasFeeCap, testGasTipCap,
testWalletAddr, testNonce, testGasFeeCap, testGasTipCap, testGasLimit,
)
require.Equal(t, &testWalletAddr, tx.To())
require.Equal(t, testNonce, tx.Nonce())
require.Equal(t, testGasLimit, tx.Gas())
require.Equal(t, testGasFeeCap, tx.GasFeeCap())
require.Equal(t, testGasTipCap, tx.GasTipCap())
require.Equal(t, new(big.Int), tx.Value())
......@@ -64,6 +67,9 @@ func TestSignClearingTxEstimateGasSuccess(t *testing.T) {
SuggestGasTipCap: func(_ context.Context) (*big.Int, error) {
return testGasTipCap, nil
},
EstimateGas: func(_ context.Context, _ ethereum.CallMsg) (uint64, error) {
return testGasLimit, nil
},
})
expGasFeeCap := new(big.Int).Add(
......@@ -131,6 +137,33 @@ func TestSignClearingTxHeaderByNumberFail(t *testing.T) {
require.Nil(t, tx)
}
// TestSignClearingTxEstimateGasFail asserts that signing a clearing
// transaction will fail if the underlying call to EstimateGas fails.
func TestSignClearingTxEstimateGasFail(t *testing.T) {
	errGasEstimate := errors.New("estimate gas")
	l1Client := mock.NewL1Client(mock.L1ClientConfig{
		HeaderByNumber: func(_ context.Context, _ *big.Int) (*types.Header, error) {
			return &types.Header{BaseFee: testBaseFee}, nil
		},
		SuggestGasTipCap: func(_ context.Context) (*big.Int, error) {
			return testGasTipCap, nil
		},
		// Force the gas-estimation step to fail so we can observe the error
		// propagating out of SignClearingTx.
		EstimateGas: func(_ context.Context, _ ethereum.CallMsg) (uint64, error) {
			return 0, errGasEstimate
		},
	})

	tx, err := drivers.SignClearingTx(
		"TEST", context.Background(), testWalletAddr, testNonce, l1Client,
		testPrivKey, testChainID,
	)
	require.Equal(t, errGasEstimate, err)
	require.Nil(t, tx)
}
type clearPendingTxHarness struct {
l1Client *mock.L1Client
txMgr txmgr.TxManager
......@@ -163,6 +196,11 @@ func newClearPendingTxHarnessWithNumConfs(
return testGasTipCap, nil
}
}
if l1ClientConfig.EstimateGas == nil {
l1ClientConfig.EstimateGas = func(_ context.Context, _ ethereum.CallMsg) (uint64, error) {
return testGasLimit, nil
}
}
l1Client := mock.NewL1Client(l1ClientConfig)
txMgr := txmgr.NewSimpleTxManager("test", txmgr.Config{
......
......@@ -4,6 +4,7 @@ import (
"context"
"math/big"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
)
......@@ -11,6 +12,13 @@ import (
// L1Client is an abstraction over an L1 Ethereum client functionality required
// by the batch submitter.
type L1Client interface {
// EstimateGas tries to estimate the gas needed to execute a specific
// transaction based on the current pending state of the backend blockchain.
// There is no guarantee that this is the true gas limit requirement as
// other transactions may be added or removed by miners, but it should
// provide a basis for setting a reasonable default.
EstimateGas(context.Context, ethereum.CallMsg) (uint64, error)
// HeaderByNumber returns a block header from the current canonical chain.
// If number is nil, the latest known header is returned.
HeaderByNumber(context.Context, *big.Int) (*types.Header, error)
......
package metrics
import (
"strings"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
)
......@@ -10,11 +12,11 @@ type Metrics struct {
ETHBalance prometheus.Gauge
// BatchSizeInBytes tracks the size of batch submission transactions.
BatchSizeInBytes prometheus.Histogram
BatchSizeInBytes prometheus.Summary
// NumElementsPerBatch tracks the number of L2 transactions in each batch
// submission.
NumElementsPerBatch prometheus.Histogram
NumElementsPerBatch prometheus.Summary
// SubmissionTimestamp tracks the time at which each batch was confirmed.
SubmissionTimestamp prometheus.Gauge
......@@ -44,9 +46,10 @@ type Metrics struct {
}
func NewMetrics(subsystem string) *Metrics {
subsystem = "batch_submitter_" + strings.ToLower(subsystem)
return &Metrics{
ETHBalance: promauto.NewGauge(prometheus.GaugeOpts{
Name: "batch_submitter_eth_balance",
Name: "balance_eth",
Help: "ETH balance of the batch submitter",
Subsystem: subsystem,
}),
......@@ -56,32 +59,19 @@ func NewMetrics(subsystem string) *Metrics {
Subsystem: subsystem,
Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
}),
NumElementsPerBatch: promauto.NewHistogram(prometheus.HistogramOpts{
Name: "num_elements_per_batch",
Help: "Number of transaction in each batch",
Buckets: []float64{
250,
500,
750,
1000,
1250,
1500,
1750,
2000,
2250,
2500,
2750,
3000,
},
Subsystem: subsystem,
NumElementsPerBatch: promauto.NewSummary(prometheus.SummaryOpts{
Name: "num_elements_per_batch",
Help: "Number of elements in each batch",
Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
Subsystem: subsystem,
}),
SubmissionTimestamp: promauto.NewGauge(prometheus.GaugeOpts{
Name: "submission_timestamp",
Name: "submission_timestamp_ms",
Help: "Timestamp of last batch submitter submission",
Subsystem: subsystem,
}),
SubmissionGasUsed: promauto.NewGauge(prometheus.GaugeOpts{
Name: "submission_gas_used",
Name: "submission_gas_used_wei",
Help: "Gas used to submit each batch",
Subsystem: subsystem,
}),
......@@ -101,12 +91,12 @@ func NewMetrics(subsystem string) *Metrics {
Subsystem: subsystem,
}),
BatchConfirmationTime: promauto.NewGauge(prometheus.GaugeOpts{
Name: "batch_submitter_batch_confirmation_time_ms",
Name: "batch_confirmation_time_ms",
Help: "Time to confirm batch transactions",
Subsystem: subsystem,
}),
BatchPruneCount: promauto.NewGauge(prometheus.GaugeOpts{
Name: "batch_submitter_batch_prune_count",
Name: "batch_prune_count",
Help: "Number of times a batch is pruned",
Subsystem: subsystem,
}),
......
......@@ -5,6 +5,7 @@ import (
"math/big"
"sync"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
)
......@@ -15,6 +16,13 @@ type L1ClientConfig struct {
// BlockNumber returns the most recent block number.
BlockNumber func(context.Context) (uint64, error)
// EstimateGas tries to estimate the gas needed to execute a specific
// transaction based on the current pending state of the backend blockchain.
// There is no guarantee that this is the true gas limit requirement as
// other transactions may be added or removed by miners, but it should
// provide a basis for setting a reasonable default.
EstimateGas func(context.Context, ethereum.CallMsg) (uint64, error)
// HeaderByNumber returns a block header from the current canonical chain.
// If number is nil, the latest known header is returned.
HeaderByNumber func(context.Context, *big.Int) (*types.Header, error)
......@@ -61,6 +69,18 @@ func (c *L1Client) BlockNumber(ctx context.Context) (uint64, error) {
return c.cfg.BlockNumber(ctx)
}
// EstimateGas tries to estimate the gas needed to execute a specific
// transaction based on the current pending state of the backend blockchain.
// There is no guarantee that this is the true gas limit requirement as other
// transactions may be added or removed by miners, but it should provide a basis
// for setting a reasonable default.
//
// In this mock the call is delegated to the EstimateGas function supplied via
// L1ClientConfig (and replaceable through SetEstimateGasFunc). The read lock
// is held for the duration of the call to guard against a concurrent swap of
// that function.
func (c *L1Client) EstimateGas(ctx context.Context, msg ethereum.CallMsg) (uint64, error) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return c.cfg.EstimateGas(ctx, msg)
}
// HeaderByNumber returns a block header from the current canonical chain. If
// number is nil, the latest known header is returned.
func (c *L1Client) HeaderByNumber(ctx context.Context, blockNumber *big.Int) (*types.Header, error) {
......@@ -113,6 +133,16 @@ func (c *L1Client) SetBlockNumberFunc(
c.cfg.BlockNumber = f
}
// SetEstimateGasFunc replaces the EstimateGas implementation used by the
// mock. The write lock is taken so concurrent readers always observe a
// consistent function value.
func (c *L1Client) SetEstimateGasFunc(fn func(context.Context, ethereum.CallMsg) (uint64, error)) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.cfg.EstimateGas = fn
}
// SetHeaderByNumberFunc overwrites the mock HeaderByNumber method.
func (c *L1Client) SetHeaderByNumberFunc(
f func(ctx context.Context, blockNumber *big.Int) (*types.Header, error)) {
......
......@@ -16,7 +16,7 @@ func TestCaching(t *testing.T) {
require.NoError(t, err)
defer redis.Close()
hdlr := NewRPCResponseHandler(map[string]string{
hdlr := NewRPCResponseHandler(map[string]interface{}{
"eth_chainId": "0x420",
"net_version": "0x1234",
"eth_blockNumber": "0x64",
......@@ -123,10 +123,25 @@ func TestCaching(t *testing.T) {
})
}
hdlr.SetResponse("eth_blockNumber", "0x100")
time.Sleep(1500 * time.Millisecond)
resRaw, _, err := client.SendRPC("eth_blockNumber", nil)
RequireEqualJSON(t, []byte("{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"0x100\"}"), resRaw)
t.Run("block numbers update", func(t *testing.T) {
hdlr.SetResponse("eth_blockNumber", "0x100")
time.Sleep(1500 * time.Millisecond)
resRaw, _, err := client.SendRPC("eth_blockNumber", nil)
require.NoError(t, err)
RequireEqualJSON(t, []byte("{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"0x100\"}"), resRaw)
backend.Reset()
})
t.Run("nil responses should not be cached", func(t *testing.T) {
hdlr.SetResponse("eth_getBlockByNumber", nil)
resRaw, _, err := client.SendRPC("eth_getBlockByNumber", []interface{}{"0x123"})
require.NoError(t, err)
resCache, _, err := client.SendRPC("eth_getBlockByNumber", []interface{}{"0x123"})
require.NoError(t, err)
RequireEqualJSON(t, []byte("{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":null}"), resRaw)
RequireEqualJSON(t, resRaw, resCache)
require.Equal(t, 2, countRequests(backend, "eth_getBlockByNumber"))
})
}
func countRequests(backend *MockBackend, name string) int {
......
......@@ -33,18 +33,27 @@ func SingleResponseHandler(code int, response string) http.HandlerFunc {
type RPCResponseHandler struct {
mtx sync.RWMutex
rpcResponses map[string]string
rpcResponses map[string]interface{}
}
func NewRPCResponseHandler(rpcResponses map[string]string) *RPCResponseHandler {
// NewRPCResponseHandler constructs an RPCResponseHandler seeded with the
// given method->response map. The initial map is taken as-is; values set
// later through SetResponse are validated there.
func NewRPCResponseHandler(rpcResponses map[string]interface{}) *RPCResponseHandler {
	return &RPCResponseHandler{rpcResponses: rpcResponses}
}
func (h *RPCResponseHandler) SetResponse(method, response string) {
// SetResponse registers the canned response returned for the given RPC
// method. Only string and nil responses are supported; any other type is a
// programming error in the test setup and triggers a panic.
func (h *RPCResponseHandler) SetResponse(method string, response interface{}) {
	h.mtx.Lock()
	defer h.mtx.Unlock()

	switch response.(type) {
	case string, nil:
		// Accepted response kinds.
	default:
		panic("invalid response type")
	}

	h.rpcResponses[method] = response
}
......
......@@ -237,7 +237,7 @@ func (s *Server) handleSingleRPC(ctx context.Context, req *RPCReq) (*RPCRes, boo
return NewRPCErrorRes(req.ID, err), false
}
if backendRes.Error == nil {
if backendRes.Error == nil && backendRes.Result != nil {
if err = s.cache.PutRPC(ctx, req, backendRes); err != nil {
log.Warn(
"cache put error",
......
......@@ -3,7 +3,6 @@ import { expect } from 'chai'
import { setupActor, setupRun, actor, run } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env'
import { Direction } from '../test/shared/watcher-utils'
interface BenchContext {
l1Wallet: Wallet
......@@ -42,8 +41,7 @@ actor('Funds depositor', () => {
.depositETH(DEFAULT_TEST_GAS_L2, '0xFFFF', {
value: 0x42,
gasLimit: DEFAULT_TEST_GAS_L1,
}),
Direction.L1ToL2
})
)
})
expect((await l2Wallet.getBalance()).sub(balBefore)).to.deep.equal(
......
import { performance } from 'perf_hooks'
import { Mutex } from 'async-mutex'
import { sleep } from '@eth-optimism/core-utils'
import {
sanitizeForMetrics,
......@@ -11,7 +12,6 @@ import {
failedBenchRunsTotal,
} from './metrics'
import { ActorLogger, WorkerLogger } from './logger'
import { sleep } from '../../test/shared/utils'
// eslint-disable-next-line @typescript-eslint/no-empty-function
const asyncNoop = async () => {}
......
import { utils, Wallet, BigNumber } from 'ethers'
import { utils, Wallet } from 'ethers'
import { expect } from 'chai'
import { actor, setupRun, setupActor, run } from './lib/convenience'
......@@ -34,6 +34,6 @@ actor('Value sender', () => {
value: 0x42,
})
})
expect(await randWallet.getBalance()).to.deep.equal(BigNumber.from(0x42))
expect((await randWallet.getBalance()).toString()).to.deep.equal('66')
})
})
......@@ -4,9 +4,11 @@ pragma solidity ^0.8.9;
contract FakeL2StandardERC20 {
address public immutable l1Token;
address public immutable l2Bridge;
constructor(address _l1Token) {
constructor(address _l1Token, address _l2Bridge) {
l1Token = _l1Token;
l2Bridge = _l2Bridge;
}
// Burn will be called by the L2 Bridge to burn the tokens we are bridging to L1
......
......@@ -118,7 +118,6 @@ describe('Basic L1<>L2 Communication', async () => {
const receipt = await env.messenger.waitForMessageReceipt(transaction)
console.log(await env.messenger.l2Signer.getAddress())
expect(receipt.transactionReceipt.status).to.equal(1)
expect(await L2SimpleStorage.msgSender()).to.equal(
env.messenger.contracts.l2.L2CrossDomainMessenger.address
......
import { BigNumber, Contract, ContractFactory, utils, Wallet } from 'ethers'
import { ethers } from 'hardhat'
import * as L2Artifact from '@eth-optimism/contracts/artifacts/contracts/standards/L2StandardERC20.sol/L2StandardERC20.json'
import { getContractFactory } from '@eth-optimism/contracts'
import { MessageStatus } from '@eth-optimism/sdk'
import { sleep } from '@eth-optimism/core-utils'
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env'
import { withdrawalTest } from './shared/utils'
import { Direction } from './shared/watcher-utils'
describe('Bridged tokens', () => {
let env: OptimismEnv
before(async () => {
env = await OptimismEnv.new()
})
let otherWalletL1: Wallet
let otherWalletL2: Wallet
let L1Factory__ERC20: ContractFactory
let L1__ERC20: Contract
let L2Factory__ERC20: ContractFactory
let L2__ERC20: Contract
before(async () => {
env = await OptimismEnv.new()
const other = Wallet.createRandom()
otherWalletL1 = other.connect(env.l1Wallet.provider)
otherWalletL2 = other.connect(env.l2Wallet.provider)
let tx = await env.l1Wallet.sendTransaction({
const tx1 = await env.l1Wallet.sendTransaction({
to: otherWalletL1.address,
value: utils.parseEther('0.01'),
})
await tx.wait()
tx = await env.l2Wallet.sendTransaction({
await tx1.wait()
const tx2 = await env.l2Wallet.sendTransaction({
to: otherWalletL2.address,
value: utils.parseEther('0.01'),
})
await tx.wait()
await tx2.wait()
})
let L1Factory__ERC20: ContractFactory
let L2Factory__ERC20: ContractFactory
before(async () => {
L1Factory__ERC20 = await ethers.getContractFactory('ERC20', env.l1Wallet)
L2Factory__ERC20 = new ethers.ContractFactory(
L2Artifact.abi,
L2Artifact.bytecode
)
L2Factory__ERC20 = L2Factory__ERC20.connect(env.l2Wallet)
L2Factory__ERC20 = getContractFactory('L2StandardERC20', env.l2Wallet)
})
it('should deploy an ERC20 on L1', async () => {
// This is one of the only stateful integration tests in which we don't set up a new contract
// before each test. We do this because the test is more of an "actor-based" test where we're
// going through a series of actions and confirming that the actions are performed correctly at
// every step.
let L1__ERC20: Contract
let L2__ERC20: Contract
before(async () => {
// Deploy the L1 ERC20
L1__ERC20 = await L1Factory__ERC20.deploy(1000000, 'OVM Test', 8, 'OVM')
await L1__ERC20.deployed()
})
it('should deploy a paired token on L2', async () => {
// Deploy the L2 ERC20
L2__ERC20 = await L2Factory__ERC20.deploy(
'0x4200000000000000000000000000000000000010',
L1__ERC20.address,
......@@ -56,22 +59,21 @@ describe('Bridged tokens', () => {
'OVM'
)
await L2__ERC20.deployed()
})
it('should approve the bridge', async () => {
// Approve the L1 ERC20 to spend our money
const tx = await L1__ERC20.approve(env.l1Bridge.address, 1000000)
await tx.wait()
})
it('should deposit tokens into L2', async () => {
const tx = await env.l1Bridge.depositERC20(
L1__ERC20.address,
L2__ERC20.address,
1000,
2000000,
'0x'
await env.messenger.waitForMessageReceipt(
await env.messenger.depositERC20(
L1__ERC20.address,
L2__ERC20.address,
1000
)
)
await env.waitForXDomainTransaction(tx, Direction.L1ToL2)
expect(await L1__ERC20.balanceOf(env.l1Wallet.address)).to.deep.equal(
BigNumber.from(999000)
)
......@@ -83,6 +85,7 @@ describe('Bridged tokens', () => {
it('should transfer tokens on L2', async () => {
const tx = await L2__ERC20.transfer(otherWalletL1.address, 500)
await tx.wait()
expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal(
BigNumber.from(500)
)
......@@ -94,14 +97,22 @@ describe('Bridged tokens', () => {
withdrawalTest(
'should withdraw tokens from L2 to the depositor',
async () => {
const tx = await env.l2Bridge.withdraw(
const tx = await env.messenger.withdrawERC20(
L1__ERC20.address,
L2__ERC20.address,
500,
2000000,
'0x'
500
)
await env.relayXDomainMessages(tx)
await env.waitForXDomainTransaction(tx, Direction.L2ToL1)
// TODO: Maybe this should be built into the SDK
let status: MessageStatus
while (status !== MessageStatus.READY_FOR_RELAY) {
status = await env.messenger.getMessageStatus(tx)
await sleep(1000)
}
await env.messenger.finalizeMessage(tx)
await env.messenger.waitForMessageReceipt(tx)
expect(await L1__ERC20.balanceOf(env.l1Wallet.address)).to.deep.equal(
BigNumber.from(999500)
)
......@@ -114,11 +125,25 @@ describe('Bridged tokens', () => {
withdrawalTest(
'should withdraw tokens from L2 to the transfer recipient',
async () => {
const tx = await env.l2Bridge
.connect(otherWalletL2)
.withdraw(L2__ERC20.address, 500, 2000000, '0x')
await env.relayXDomainMessages(tx)
await env.waitForXDomainTransaction(tx, Direction.L2ToL1)
const tx = await env.messenger.withdrawERC20(
L1__ERC20.address,
L2__ERC20.address,
500,
{
signer: otherWalletL2,
}
)
// TODO: Maybe this should be built into the SDK
let status: MessageStatus
while (status !== MessageStatus.READY_FOR_RELAY) {
status = await env.messenger.getMessageStatus(tx)
await sleep(1000)
}
await env.messenger.finalizeMessage(tx)
await env.messenger.waitForMessageReceipt(tx)
expect(await L1__ERC20.balanceOf(otherWalletL1.address)).to.deep.equal(
BigNumber.from(500)
)
......@@ -134,46 +159,52 @@ describe('Bridged tokens', () => {
withdrawalTest(
'should not allow an arbitrary L2 token to be withdrawn in exchange for a legitimate L1 token',
async () => {
before(async () => {
// First deposit some of the L1 token to L2, so that there is something which could be stolen.
const depositTx = await env.l1Bridge
.connect(env.l1Wallet)
.depositERC20(
L1__ERC20.address,
L2__ERC20.address,
1000,
2000000,
'0x'
)
await env.waitForXDomainTransaction(depositTx, Direction.L1ToL2)
expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal(
BigNumber.from(1000)
// First deposit some of the L1 token to L2, so that there is something which could be stolen.
await env.messenger.waitForMessageReceipt(
await env.messenger.depositERC20(
L1__ERC20.address,
L2__ERC20.address,
1000
)
})
)
expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal(
BigNumber.from(1000)
)
// Deploy a Fake L2 token, which:
// - returns the address of a legitimate L1 token from its l1Token() getter.
// - allows the L2 bridge to call its burn() function.
const fakeToken = await (
await ethers.getContractFactory('FakeL2StandardERC20', env.l2Wallet)
).deploy(L1__ERC20.address)
).deploy(
L1__ERC20.address,
env.messenger.contracts.l2.L2StandardBridge.address
)
await fakeToken.deployed()
const balBefore = await L1__ERC20.balanceOf(otherWalletL1.address)
// Withdraw some of the Fake L2 token, hoping to receive the same amount of the legitimate
// token on L1.
const withdrawalTx = await env.l2Bridge
.connect(otherWalletL2)
.withdrawTo(
fakeToken.address,
otherWalletL1.address,
500,
1_000_000,
'0x'
)
await env.relayXDomainMessages(withdrawalTx)
await env.waitForXDomainTransaction(withdrawalTx, Direction.L2ToL1)
const withdrawalTx = await env.messenger.withdrawERC20(
L1__ERC20.address,
fakeToken.address,
500,
{
signer: otherWalletL2,
}
)
// TODO: Maybe this should be built into the SDK
let status: MessageStatus
while (status !== MessageStatus.READY_FOR_RELAY) {
status = await env.messenger.getMessageStatus(withdrawalTx)
await sleep(1000)
}
await env.messenger.finalizeMessage(withdrawalTx)
await env.messenger.waitForMessageReceipt(withdrawalTx)
// Ensure that the L1 recipient address has not received any additional L1 token balance.
expect(await L1__ERC20.balanceOf(otherWalletL1.address)).to.deep.equal(
......
......@@ -7,7 +7,6 @@ import { predeploys, getContractFactory } from '@eth-optimism/contracts'
import { expect } from './shared/setup'
import { hardhatTest } from './shared/utils'
import { OptimismEnv } from './shared/env'
import { Direction } from './shared/watcher-utils'
const setPrices = async (env: OptimismEnv, value: number | BigNumber) => {
const gasPrice = await env.gasPriceOracle.setGasPrice(value)
......@@ -171,7 +170,7 @@ describe('Fee Payment Integration Tests', async () => {
// Wait for the withdrawal to be relayed to L1.
await withdrawTx.wait()
await env.relayXDomainMessages(withdrawTx)
await env.waitForXDomainTransaction(withdrawTx, Direction.L2ToL1)
await env.waitForXDomainTransaction(withdrawTx)
// Balance difference should be equal to old L2 balance.
const balanceAfter = await env.l1Wallet.provider.getBalance(l1FeeWallet)
......
......@@ -38,7 +38,7 @@ describe('Native ETH value integration tests', () => {
}
const value = ethers.utils.parseEther('0.01')
await fundUser(env.watcher, env.l1Bridge, value, wallet.address)
await fundUser(env.messenger, value, wallet.address)
const initialBalances = await getBalances()
......@@ -156,12 +156,7 @@ describe('Native ETH value integration tests', () => {
beforeEach(async () => {
ValueCalls0 = await Factory__ValueCalls.deploy()
ValueCalls1 = await Factory__ValueCalls.deploy()
await fundUser(
env.watcher,
env.l1Bridge,
initialBalance0,
ValueCalls0.address
)
await fundUser(env.messenger, initialBalance0, ValueCalls0.address)
// These tests assume ValueCalls0 starts with a balance, but ValueCalls1 does not.
await checkBalances([initialBalance0, 0])
})
......@@ -203,12 +198,7 @@ describe('Native ETH value integration tests', () => {
it('should have the correct ovmSELFBALANCE which includes the msg.value', async () => {
// give an initial balance which the ovmCALLVALUE should be added to when calculating ovmSELFBALANCE
const initialBalance = 10
await fundUser(
env.watcher,
env.l1Bridge,
initialBalance,
ValueCalls1.address
)
await fundUser(env.messenger, initialBalance, ValueCalls1.address)
const sendAmount = 15
const [success, returndata] = await ValueCalls0.callStatic.sendWithData(
......
......@@ -6,7 +6,6 @@ import { expectApprox } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { expect } from './shared/setup'
import { Direction } from './shared/watcher-utils'
import {
DEFAULT_TEST_GAS_L1,
DEFAULT_TEST_GAS_L2,
......@@ -71,8 +70,7 @@ describe('Native ETH Integration Tests', async () => {
to: env.l1Bridge.address,
value: depositAmount,
gasLimit: DEFAULT_TEST_GAS_L1,
}),
Direction.L1ToL2
})
)
const l1FeePaid = receipt.gasUsed.mul(tx.gasPrice)
......@@ -96,8 +94,7 @@ describe('Native ETH Integration Tests', async () => {
env.l1Bridge.depositETH(DEFAULT_TEST_GAS_L2, '0xFFFF', {
value: depositAmount,
gasLimit: DEFAULT_TEST_GAS_L1,
}),
Direction.L1ToL2
})
)
const l1FeePaid = receipt.gasUsed.mul(tx.gasPrice)
......@@ -121,8 +118,7 @@ describe('Native ETH Integration Tests', async () => {
env.l1Bridge.depositETHTo(l2Bob.address, DEFAULT_TEST_GAS_L2, '0xFFFF', {
value: depositAmount,
gasLimit: DEFAULT_TEST_GAS_L1,
}),
Direction.L1ToL2
})
)
const l1FeePaid = depositReceipts.receipt.gasUsed.mul(
......@@ -152,8 +148,7 @@ describe('Native ETH Integration Tests', async () => {
env.l1Bridge.depositETH(ASSUMED_L2_GAS_LIMIT, data, {
value: depositAmount,
gasLimit: 4_000_000,
}),
Direction.L1ToL2
})
)
const l1FeePaid = receipt.gasUsed.mul(tx.gasPrice)
......@@ -196,10 +191,7 @@ describe('Native ETH Integration Tests', async () => {
)
await transaction.wait()
await env.relayXDomainMessages(transaction)
const receipts = await env.waitForXDomainTransaction(
transaction,
Direction.L2ToL1
)
const receipts = await env.waitForXDomainTransaction(transaction)
const fee = receipts.tx.gasLimit.mul(receipts.tx.gasPrice)
const postBalances = await getBalances(env)
......@@ -242,10 +234,7 @@ describe('Native ETH Integration Tests', async () => {
await transaction.wait()
await env.relayXDomainMessages(transaction)
const receipts = await env.waitForXDomainTransaction(
transaction,
Direction.L2ToL1
)
const receipts = await env.waitForXDomainTransaction(transaction)
const l2Fee = receipts.tx.gasPrice.mul(receipts.receipt.gasUsed)
......@@ -289,8 +278,7 @@ describe('Native ETH Integration Tests', async () => {
env.l1Bridge.depositETH(DEFAULT_TEST_GAS_L2, '0xFFFF', {
value: amount,
gasLimit: DEFAULT_TEST_GAS_L1,
}),
Direction.L1ToL2
})
)
// 2. transfer to another address
......@@ -317,10 +305,7 @@ describe('Native ETH Integration Tests', async () => {
)
await transaction.wait()
await env.relayXDomainMessages(transaction)
const receipts = await env.waitForXDomainTransaction(
transaction,
Direction.L2ToL1
)
const receipts = await env.waitForXDomainTransaction(transaction)
// Compute the L1 portion of the fee
const l1Fee = await env.gasPriceOracle.getL1Fee(
......
/* Imports: External */
import { ethers } from 'hardhat'
import { injectL2Context, expectApprox } from '@eth-optimism/core-utils'
import { expectApprox } from '@eth-optimism/core-utils'
import { predeploys } from '@eth-optimism/contracts'
import { asL2Provider } from '@eth-optimism/sdk'
import { Contract, BigNumber } from 'ethers'
/* Imports: Internal */
......@@ -13,7 +14,6 @@ import {
DEFAULT_TEST_GAS_L1,
} from './shared/utils'
import { OptimismEnv } from './shared/env'
import { Direction } from './shared/watcher-utils'
/**
* These tests cover the OVM execution contexts. In the OVM execution
......@@ -21,7 +21,7 @@ import { Direction } from './shared/watcher-utils'
* must be equal to the blocknumber/timestamp of the L1 transaction.
*/
describe('OVM Context: Layer 2 EVM Context', () => {
const L2Provider = injectL2Context(l2Provider)
const L2Provider = asL2Provider(l2Provider)
let env: OptimismEnv
before(async () => {
env = await OptimismEnv.new()
......@@ -62,7 +62,7 @@ describe('OVM Context: Layer 2 EVM Context', () => {
// Wait for the transaction to be sent over to L2.
await tx.wait()
const pair = await env.waitForXDomainTransaction(tx, Direction.L1ToL2)
const pair = await env.waitForXDomainTransaction(tx)
// Get the L1 block that the enqueue transaction was in so that
// the timestamp can be compared against the layer two contract
......@@ -144,33 +144,32 @@ describe('OVM Context: Layer 2 EVM Context', () => {
* context. The data returned should match what is actually being used as the
* OVM context.
*/
it('should return same timestamp and blocknumbers between `eth_call` and `rollup_getInfo`', async () => {
// As atomically as possible, call `rollup_getInfo` and Multicall for the
// blocknumber and timestamp. If this is not atomic, then the sequencer can
// happend to update the timestamp between the `eth_call` and the `rollup_getInfo`
const [info, [, returnData]] = await Promise.all([
L2Provider.send('rollup_getInfo', []),
Multicall.callStatic.aggregate([
[
OVMContextStorage.address,
OVMContextStorage.interface.encodeFunctionData(
'getCurrentBlockTimestamp'
),
],
[
OVMContextStorage.address,
OVMContextStorage.interface.encodeFunctionData(
'getCurrentL1BlockNumber'
),
],
]),
])
const timestamp = BigNumber.from(returnData[0])
const blockNumber = BigNumber.from(returnData[1])
expect(info.ethContext.blockNumber).to.deep.equal(blockNumber.toNumber())
expect(info.ethContext.timestamp).to.deep.equal(timestamp.toNumber())
// TODO: This test is not reliable. If we really care about this then we need to figure out a
// more reliable way to test this behavior.
it.skip('should return same timestamp and blocknumbers between `eth_call` and `rollup_getInfo`', async () => {
// // As atomically as possible, call `rollup_getInfo` and Multicall for the
// // blocknumber and timestamp. If this is not atomic, then the sequencer can
// // happend to update the timestamp between the `eth_call` and the `rollup_getInfo`
// const [info, [, returnData]] = await Promise.all([
// L2Provider.send('rollup_getInfo', []),
// Multicall.callStatic.aggregate([
// [
// OVMContextStorage.address,
// OVMContextStorage.interface.encodeFunctionData(
// 'getCurrentBlockTimestamp'
// ),
// ],
// [
// OVMContextStorage.address,
// OVMContextStorage.interface.encodeFunctionData(
// 'getCurrentL1BlockNumber'
// ),
// ],
// ]),
// ])
// const timestamp = BigNumber.from(returnData[0])
// const blockNumber = BigNumber.from(returnData[1])
// expect(info.ethContext.blockNumber).to.deep.equal(blockNumber.toNumber())
// expect(info.ethContext.timestamp).to.deep.equal(timestamp.toNumber())
})
})
/* Imports: Internal */
import { providers } from 'ethers'
import { injectL2Context, applyL1ToL2Alias } from '@eth-optimism/core-utils'
import { applyL1ToL2Alias } from '@eth-optimism/core-utils'
import { asL2Provider } from '@eth-optimism/sdk'
/* Imports: External */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env'
import { Direction } from './shared/watcher-utils'
import { DEFAULT_TEST_GAS_L1, envConfig } from './shared/utils'
describe('Queue Ingestion', () => {
......@@ -13,7 +13,7 @@ describe('Queue Ingestion', () => {
let l2Provider: providers.JsonRpcProvider
before(async () => {
env = await OptimismEnv.new()
l2Provider = injectL2Context(env.l2Wallet.provider as any)
l2Provider = asL2Provider(env.l2Wallet.provider as any)
})
// The batch submitter will notice that there are transactions
......@@ -41,10 +41,7 @@ describe('Queue Ingestion', () => {
for (let i = 0; i < numTxs; i++) {
const l1Tx = txs[i]
const l1TxReceipt = await txs[i].wait()
const receipt = await env.waitForXDomainTransaction(
l1Tx,
Direction.L1ToL2
)
const receipt = await env.waitForXDomainTransaction(l1Tx)
const l2Tx = (await l2Provider.getTransaction(
receipt.remoteTx.hash
)) as any
......
/* Imports: External */
import { TransactionReceipt } from '@ethersproject/abstract-provider'
import { sleep } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env'
import {
defaultTransactionFactory,
gasPriceForL2,
sleep,
envConfig,
} from './shared/utils'
......
import { expectApprox, injectL2Context } from '@eth-optimism/core-utils'
/* Imports: External */
import { expectApprox, sleep } from '@eth-optimism/core-utils'
import { asL2Provider } from '@eth-optimism/sdk'
import { Wallet, BigNumber, Contract, ContractFactory, constants } from 'ethers'
import { serialize } from '@ethersproject/transactions'
import { ethers } from 'hardhat'
......@@ -7,8 +9,8 @@ import {
TransactionRequest,
} from '@ethersproject/providers'
/* Imports: Internal */
import {
sleep,
l2Provider,
defaultTransactionFactory,
fundUser,
......@@ -25,7 +27,7 @@ describe('Basic RPC tests', () => {
let env: OptimismEnv
let wallet: Wallet
const provider = injectL2Context(l2Provider)
const provider = asL2Provider(l2Provider)
let Reverter: Contract
let ValueContext: Contract
......@@ -219,7 +221,7 @@ describe('Basic RPC tests', () => {
// Fund account to call from
const from = wallet.address
const value = 15
await fundUser(env.watcher, env.l1Bridge, value, from)
await fundUser(env.messenger, value, from)
// Do the call and check msg.value
const data = ValueContext.interface.encodeFunctionData('getCallValue')
......
......@@ -2,7 +2,7 @@
import { Contract, utils, Wallet, providers } from 'ethers'
import { TransactionResponse } from '@ethersproject/providers'
import { getContractFactory, predeploys } from '@eth-optimism/contracts'
import { Watcher } from '@eth-optimism/core-utils'
import { sleep } from '@eth-optimism/core-utils'
import { getMessagesAndProofsForL2Transaction } from '@eth-optimism/message-relayer'
import { CrossChainMessenger } from '@eth-optimism/sdk'
......@@ -20,14 +20,11 @@ import {
getOvmEth,
getL1Bridge,
getL2Bridge,
sleep,
envConfig,
DEFAULT_TEST_GAS_L1,
} from './utils'
import {
initWatcher,
CrossDomainMessagePair,
Direction,
waitForXDomainTransaction,
} from './watcher-utils'
......@@ -48,9 +45,6 @@ export class OptimismEnv {
gasPriceOracle: Contract
sequencerFeeVault: Contract
// The L1 <> L2 State watcher
watcher: Watcher
// The wallets
l1Wallet: Wallet
l2Wallet: Wallet
......@@ -72,7 +66,6 @@ export class OptimismEnv {
this.l2Messenger = args.l2Messenger
this.gasPriceOracle = args.gasPriceOracle
this.sequencerFeeVault = args.sequencerFeeVault
this.watcher = args.watcher
this.l1Wallet = args.l1Wallet
this.l2Wallet = args.l2Wallet
this.messenger = args.messenger
......@@ -85,26 +78,25 @@ export class OptimismEnv {
}
static async new(): Promise<OptimismEnv> {
const network = await l1Provider.getNetwork()
const addressManager = getAddressManager(l1Wallet)
const watcher = await initWatcher(l1Provider, l2Provider, addressManager)
const l1Bridge = await getL1Bridge(l1Wallet, addressManager)
// fund the user if needed
const balance = await l2Wallet.getBalance()
const min = envConfig.L2_WALLET_MIN_BALANCE_ETH.toString()
const topUp = envConfig.L2_WALLET_TOP_UP_AMOUNT_ETH.toString()
if (balance.lt(utils.parseEther(min))) {
await fundUser(watcher, l1Bridge, utils.parseEther(topUp))
}
const l1MessengerAddress = await addressManager.getAddress(
'Proxy__OVM_L1CrossDomainMessenger'
)
const l2MessengerAddress = await addressManager.getAddress(
'L2CrossDomainMessenger'
)
const l1Messenger = getContractFactory('L1CrossDomainMessenger')
.connect(l1Wallet)
.attach(watcher.l1.messengerAddress)
.attach(l1MessengerAddress)
const ovmEth = getOvmEth(l2Wallet)
const l2Bridge = await getL2Bridge(l2Wallet)
const l2Messenger = getContractFactory('L2CrossDomainMessenger')
.connect(l2Wallet)
.attach(watcher.l2.messengerAddress)
.attach(l2MessengerAddress)
const ctcAddress = await addressManager.getAddress(
'CanonicalTransactionChain'
)
......@@ -129,13 +121,30 @@ export class OptimismEnv {
.connect(l2Wallet)
.attach(predeploys.OVM_L1BlockNumber)
const network = await l1Provider.getNetwork()
const messenger = new CrossChainMessenger({
l1SignerOrProvider: l1Wallet,
l2SignerOrProvider: l2Wallet,
l1ChainId: network.chainId,
contracts: {
l1: {
AddressManager: envConfig.ADDRESS_MANAGER,
L1CrossDomainMessenger: l1Messenger.address,
L1StandardBridge: l1Bridge.address,
StateCommitmentChain: sccAddress,
CanonicalTransactionChain: ctcAddress,
BondManager: await addressManager.getAddress('BondManager'),
},
},
})
// fund the user if needed
const balance = await l2Wallet.getBalance()
const min = envConfig.L2_WALLET_MIN_BALANCE_ETH.toString()
const topUp = envConfig.L2_WALLET_TOP_UP_AMOUNT_ETH.toString()
if (balance.lt(utils.parseEther(min))) {
await fundUser(messenger, utils.parseEther(topUp))
}
return new OptimismEnv({
addressManager,
l1Bridge,
......@@ -148,7 +157,6 @@ export class OptimismEnv {
sequencerFeeVault,
l2Bridge,
l2Messenger,
watcher,
l1Wallet,
l2Wallet,
messenger,
......@@ -160,10 +168,9 @@ export class OptimismEnv {
}
async waitForXDomainTransaction(
tx: Promise<TransactionResponse> | TransactionResponse,
direction: Direction
tx: Promise<TransactionResponse> | TransactionResponse
): Promise<CrossDomainMessagePair> {
return waitForXDomainTransaction(this.watcher, tx, direction)
return waitForXDomainTransaction(this.messenger, tx)
}
/**
......
/* Imports: External */
import { ethers } from 'ethers'
import { sleep } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { OptimismEnv } from './env'
import { Direction } from './watcher-utils'
import { gasPriceForL1, gasPriceForL2, sleep } from './utils'
import { gasPriceForL1, gasPriceForL2 } from './utils'
interface TransactionParams {
contract: ethers.Contract
......@@ -51,7 +51,7 @@ export const executeL1ToL2Transaction = async (
}
)
)
await env.waitForXDomainTransaction(receipt, Direction.L1ToL2)
await env.waitForXDomainTransaction(receipt)
}
export const executeL2ToL1Transaction = async (
......@@ -77,7 +77,7 @@ export const executeL2ToL1Transaction = async (
)
await env.relayXDomainMessages(receipt)
await env.waitForXDomainTransaction(receipt, Direction.L2ToL1)
await env.waitForXDomainTransaction(receipt)
}
export const executeL2Transaction = async (
......
......@@ -4,7 +4,6 @@ import {
Wallet,
constants,
providers,
BigNumberish,
BigNumber,
utils,
} from 'ethers'
......@@ -13,13 +12,17 @@ import {
getContractInterface,
predeploys,
} from '@eth-optimism/contracts'
import { injectL2Context, remove0x, Watcher } from '@eth-optimism/core-utils'
import { remove0x } from '@eth-optimism/core-utils'
import {
CrossChainMessenger,
NumberLike,
asL2Provider,
} from '@eth-optimism/sdk'
import { cleanEnv, str, num, bool, makeValidator } from 'envalid'
import dotenv from 'dotenv'
dotenv.config()
/* Imports: Internal */
import { Direction, waitForXDomainTransaction } from './watcher-utils'
import { OptimismEnv } from './env'
export const isLiveNetwork = () => {
......@@ -113,17 +116,17 @@ export const envConfig = procEnv
export const l1Provider = new providers.JsonRpcProvider(procEnv.L1_URL)
l1Provider.pollingInterval = procEnv.L1_POLLING_INTERVAL
export const l2Provider = injectL2Context(
export const l2Provider = asL2Provider(
new providers.JsonRpcProvider(procEnv.L2_URL)
)
l2Provider.pollingInterval = procEnv.L2_POLLING_INTERVAL
export const replicaProvider = injectL2Context(
export const replicaProvider = asL2Provider(
new providers.JsonRpcProvider(procEnv.REPLICA_URL)
)
replicaProvider.pollingInterval = procEnv.REPLICA_POLLING_INTERVAL
export const verifierProvider = injectL2Context(
export const verifierProvider = asL2Provider(
new providers.JsonRpcProvider(procEnv.VERIFIER_URL)
)
verifierProvider.pollingInterval = procEnv.L2_POLLING_INTERVAL
......@@ -180,23 +183,26 @@ export const getOvmEth = (wallet: Wallet) => {
}
export const fundUser = async (
watcher: Watcher,
bridge: Contract,
amount: BigNumberish,
messenger: CrossChainMessenger,
amount: NumberLike,
recipient?: string
) => {
const value = BigNumber.from(amount)
const tx = recipient
? bridge.depositETHTo(recipient, DEFAULT_TEST_GAS_L2, '0x', {
value,
gasLimit: DEFAULT_TEST_GAS_L1,
})
: bridge.depositETH(DEFAULT_TEST_GAS_L2, '0x', {
value,
gasLimit: DEFAULT_TEST_GAS_L1,
})
await waitForXDomainTransaction(watcher, tx, Direction.L1ToL2)
await messenger.waitForMessageReceipt(
await messenger.depositETH(amount, {
l2GasLimit: DEFAULT_TEST_GAS_L2,
overrides: {
gasPrice: DEFAULT_TEST_GAS_L1,
},
})
)
if (recipient !== undefined) {
const tx = await messenger.l2Signer.sendTransaction({
to: recipient,
value: amount,
})
await tx.wait()
}
}
export const conditionalTest = (
......@@ -235,8 +241,6 @@ export const hardhatTest = (name, fn) =>
'Skipping test on non-Hardhat environment.'
)
export const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms))
const abiCoder = new utils.AbiCoder()
export const encodeSolidityRevertMessage = (_reason: string): string => {
return '0x08c379a0' + remove0x(abiCoder.encode(['string'], [_reason]))
......
import {
JsonRpcProvider,
TransactionReceipt,
TransactionResponse,
} from '@ethersproject/providers'
import { Watcher } from '@eth-optimism/core-utils'
import { Contract, Transaction } from 'ethers'
export const initWatcher = async (
l1Provider: JsonRpcProvider,
l2Provider: JsonRpcProvider,
AddressManager: Contract
) => {
const l1MessengerAddress = await AddressManager.getAddress(
'Proxy__OVM_L1CrossDomainMessenger'
)
const l2MessengerAddress = await AddressManager.getAddress(
'L2CrossDomainMessenger'
)
return new Watcher({
l1: {
provider: l1Provider,
messengerAddress: l1MessengerAddress,
},
l2: {
provider: l2Provider,
messengerAddress: l2MessengerAddress,
},
})
}
import { Transaction } from 'ethers'
import { CrossChainMessenger, MessageDirection } from '@eth-optimism/sdk'
export interface CrossDomainMessagePair {
tx: Transaction
......@@ -42,41 +18,33 @@ export enum Direction {
}
export const waitForXDomainTransaction = async (
watcher: Watcher,
tx: Promise<TransactionResponse> | TransactionResponse,
direction: Direction
messenger: CrossChainMessenger,
tx: Promise<TransactionResponse> | TransactionResponse
): Promise<CrossDomainMessagePair> => {
const { src, dest } =
direction === Direction.L1ToL2
? { src: watcher.l1, dest: watcher.l2 }
: { src: watcher.l2, dest: watcher.l1 }
// await it if needed
tx = await tx
// get the receipt and the full transaction
const receipt = await tx.wait()
const fullTx = await src.provider.getTransaction(tx.hash)
// get the message hash which was created on the SentMessage
const [xDomainMsgHash] = await watcher.getMessageHashesFromTx(src, tx.hash)
if (!xDomainMsgHash) {
throw new Error(`No x-domain message hash for tx hash ${tx.hash}, bailing.`)
const receipt = await tx.wait()
const resolved = await messenger.toCrossChainMessage(tx)
const messageReceipt = await messenger.waitForMessageReceipt(tx)
let fullTx: any
let remoteTx: any
if (resolved.direction === MessageDirection.L1_TO_L2) {
fullTx = await messenger.l1Provider.getTransaction(tx.hash)
remoteTx = await messenger.l2Provider.getTransaction(
messageReceipt.transactionReceipt.transactionHash
)
} else {
fullTx = await messenger.l2Provider.getTransaction(tx.hash)
remoteTx = await messenger.l1Provider.getTransaction(
messageReceipt.transactionReceipt.transactionHash
)
}
// Get the transaction and receipt on the remote layer
const remoteReceipt = await watcher.getTransactionReceipt(
dest,
xDomainMsgHash
)
const remoteTx = await dest.provider.getTransaction(
remoteReceipt.transactionHash
)
return {
tx: fullTx,
receipt,
remoteTx,
remoteReceipt,
remoteReceipt: messageReceipt.transactionReceipt,
}
}
......@@ -47,12 +47,7 @@ describe('stress tests', () => {
}
for (const wallet of wallets) {
await fundUser(
env.watcher,
env.l1Bridge,
utils.parseEther('0.1'),
wallet.address
)
await fundUser(env.messenger, utils.parseEther('0.1'), wallet.address)
}
})
......
/* Imports: External */
import { TransactionReceipt } from '@ethersproject/abstract-provider'
import { sleep } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env'
import {
defaultTransactionFactory,
gasPriceForL2,
sleep,
envConfig,
} from './shared/utils'
......
......@@ -43,7 +43,3 @@ ps-metrics:
-f docker-compose-metrics.yml \
ps
.PHONY: ps
regenesis-surgery:
docker build -f ./docker/Dockerfile.regenesis-surgery \
-t ethereumoptimism/regenesis-surgery:latest ..
......@@ -32,7 +32,6 @@ COPY packages/data-transport-layer/package.json ./packages/data-transport-layer/
COPY packages/batch-submitter/package.json ./packages/batch-submitter/package.json
COPY packages/message-relayer/package.json ./packages/message-relayer/package.json
COPY packages/replica-healthcheck/package.json ./packages/replica-healthcheck/package.json
COPY packages/regenesis-surgery/package.json ./packages/regenesis-surgery/package.json
COPY integration-tests/package.json ./integration-tests/package.json
RUN yarn install --frozen-lockfile
......
......@@ -21,7 +21,6 @@ COPY packages/data-transport-layer/package.json ./packages/data-transport-layer/
COPY packages/batch-submitter/package.json ./packages/batch-submitter/package.json
COPY packages/message-relayer/package.json ./packages/message-relayer/package.json
COPY packages/replica-healthcheck/package.json ./packages/replica-healthcheck/package.json
COPY packages/regenesis-surgery/package.json ./packages/regenesis-surgery/package.json
COPY integration-tests/package.json ./integration-tests/package.json
RUN yarn install --frozen-lockfile
......
ARG LOCAL_REGISTRY=docker.io
ARG BUILDER_TAG=latest
FROM ${LOCAL_REGISTRY}/ethereumoptimism/builder:${BUILDER_TAG} AS builder
FROM node:16-alpine
RUN apk add --no-cache curl bash jq
WORKDIR /opt/optimism
# copy top level files
COPY --from=builder /optimism/*.json /optimism/yarn.lock ./
COPY --from=builder /optimism/node_modules ./node_modules
# copy deps (would have been nice if docker followed the symlinks required)
COPY --from=builder /optimism/packages/core-utils/package.json ./packages/core-utils/package.json
COPY --from=builder /optimism/packages/core-utils/dist ./packages/core-utils/dist
COPY --from=builder /optimism/packages/common-ts/package.json ./packages/common-ts/package.json
COPY --from=builder /optimism/packages/common-ts/dist ./packages/common-ts/dist
COPY --from=builder /optimism/packages/contracts/package.json ./packages/contracts/package.json
COPY --from=builder /optimism/packages/contracts/deployments ./packages/contracts/deployments
COPY --from=builder /optimism/packages/contracts/dist ./packages/contracts/dist
COPY --from=builder /optimism/packages/contracts/artifacts ./packages/contracts/artifacts
# copy the service
WORKDIR /opt/optimism/packages/regenesis-surgery
COPY --from=builder /optimism/packages/regenesis-surgery/package.json ./
COPY --from=builder /optimism/packages/regenesis-surgery/scripts ./scripts
COPY --from=builder /optimism/packages/regenesis-surgery/node_modules ./node_modules
......@@ -3,6 +3,7 @@ ETH_NETWORK_NAME=clique
LOG_LEVEL=debug
BATCH_SUBMITTER_LOG_LEVEL=debug
BATCH_SUBMITTER_LOG_TERMINAL=true
BATCH_SUBMITTER_MAX_L1_TX_SIZE=90000
BATCH_SUBMITTER_MAX_BATCH_SUBMISSION_TIME=0
BATCH_SUBMITTER_POLL_INTERVAL=500ms
......
......@@ -23,4 +23,4 @@ curl \
$L2_NODE_WEB3_URL
# go
exec node ./exec/run-message-relayer.js
exec yarn start
export * from './sequencer-batch'
import { ethers } from 'ethers'
import { remove0x, add0x } from './common/hex-strings'
import { remove0x, add0x } from './hex-strings'
/**
* Converts an ethers BigNumber into an equivalent Ethereum address representation.
......
/**
* Common JavaScript/TypeScript utilities
*/
export * from './basic-types'
export * from './bn'
export * from './hex-strings'
export * from './misc'
export * from './test-utils'
......@@ -11,7 +11,12 @@ export const sleep = async (ms: number): Promise<void> => {
})
}
// Returns a copy of an object
/**
* Returns a clone of the object.
*
* @param obj Object to clone.
* @returns Clone of the object.
*/
export const clone = (obj: any): any => {
if (typeof obj === 'undefined') {
throw new Error(`Trying to clone undefined object`)
......@@ -33,6 +38,13 @@ export const reqenv = (name: string): string => {
return value
}
/**
* Loads a variable from the environment and returns a fallback if not found.
*
* @param name Name of the variable to load.
* @param [fallback] Optional value to be returned as fallback.
* @returns Value of the variable as a string, fallback or undefined.
*/
export const getenv = (name: string, fallback?: string): string | undefined => {
return process.env[name] || fallback
}
/**
* Utilities related to BCFG
*/
export * from './bcfg-types'
/**
* Utilities that extend or enhance the ethers.js library
*/
export * from './fallback-provider'
/**
* Utilities related to go-ethereum (Geth)
*/
export * from './geth-types'
export * from './geth'
/**
* Utilities related to specific external projects
*/
export * from './bcfg'
export * from './rollup'
export * from './basic'
export * from './ethers'
export * from './geth'
export * from './coders'
export * from './common'
export * from './watcher'
export * from './l2context'
export * from './types'
export * from './fees'
export * from './provider'
export * from './alias'
export * from './types'
export * from './bn'
export * from './external'
export * from './optimism'
import { ethers } from 'ethers'
import { bnToAddress } from './bn'
import { bnToAddress } from '../common'
// Constant representing the alias to apply to the msg.sender when a contract sends an L1 => L2
// message. We need this aliasing scheme because a contract can be deployed to the same address
......
......@@ -4,7 +4,7 @@
import { BigNumber } from 'ethers'
import { remove0x } from './common'
import { remove0x } from '../common'
const txDataZeroGas = 4
const txDataNonZeroGasEIP2028 = 16
......
/**
* Utils specifically related to Optimism.
*/
export * from './alias'
export * from './batch-encoding'
export * from './fees'
export * from './l2context'
export * from './rollup-types'
export * from './watcher'
import { expect } from './setup'
import { applyL1ToL2Alias, undoL1ToL2Alias } from '../src/alias'
import { applyL1ToL2Alias, undoL1ToL2Alias } from '../src'
describe('address aliasing utils', () => {
describe('applyL1ToL2Alias', () => {
......
import * as fees from '../src/fees'
import { zeroesAndOnes } from '../src'
describe('Fees', () => {
it('should count zeros and ones', () => {
......@@ -10,7 +10,7 @@ describe('Fees', () => {
]
for (const test of cases) {
const [zeros, ones] = fees.zeroesAndOnes(test.input)
const [zeros, ones] = zeroesAndOnes(test.input)
zeros.should.eq(test.zeros)
ones.should.eq(test.ones)
}
......
/* Imports: Internal */
import { expect } from './setup'
import { sleep } from '../src'
import { sleep, clone, reqenv, getenv } from '../src'
describe('sleep', async () => {
it('should return wait input amount of ms', async () => {
......@@ -10,3 +10,83 @@ describe('sleep', async () => {
expect(startTime + 1000 <= endTime).to.deep.equal(true)
})
})
describe('clone', async () => {
it('should return a cloned object', async () => {
const exampleObject = { example: 'Example' }
const clonedObject = clone(exampleObject)
expect(clonedObject).to.not.equal(exampleObject)
expect(JSON.stringify(clonedObject)).to.equal(JSON.stringify(exampleObject))
})
})
describe('reqenv', async () => {
let cachedEnvironment
const temporaryEnvironmentKey = 'testVariable'
const temporaryEnvironment = {
[temporaryEnvironmentKey]: 'This is an environment variable',
}
before(() => {
cachedEnvironment = process.env
process.env = temporaryEnvironment
})
it('should return an existent environment variable', async () => {
const requiredEnvironmentValue = reqenv(temporaryEnvironmentKey)
expect(requiredEnvironmentValue).to.equal(
temporaryEnvironment[temporaryEnvironmentKey]
)
})
it('should throw an error trying to return a variable that does not exist', async () => {
const undeclaredVariableName = 'undeclaredVariable'
const failedReqenv = () => reqenv(undeclaredVariableName)
expect(failedReqenv).to.throw()
})
after(() => {
process.env = cachedEnvironment
})
})
describe('getenv', async () => {
let cachedEnvironment
const temporaryEnvironmentKey = 'testVariable'
const temporaryEnvironment = {
[temporaryEnvironmentKey]: 'This is an environment variable',
}
const fallback = 'fallback'
before(() => {
cachedEnvironment = process.env
process.env = temporaryEnvironment
})
it('should return an existent environment variable', async () => {
const environmentVariable = getenv(temporaryEnvironmentKey)
expect(environmentVariable).to.equal(
temporaryEnvironment[temporaryEnvironmentKey]
)
})
it('should return an existent environment variable even if fallback is passed', async () => {
const environmentVariable = getenv(temporaryEnvironmentKey, fallback)
expect(environmentVariable).to.equal(
temporaryEnvironment[temporaryEnvironmentKey]
)
})
it('should return fallback if variable is not defined', async () => {
const undeclaredVariableName = 'undeclaredVariable'
expect(getenv(undeclaredVariableName, fallback)).to.equal(fallback)
})
it('should return undefined if no fallback is passed and variable is not defined', async () => {
expect(getenv('undeclaredVariable')).to.be.undefined
})
after(() => {
process.env = cachedEnvironment
})
})
......@@ -117,4 +117,5 @@ const main = async () => {
await service.start()
}
export default main
main()
REGEN__STATE_DUMP_FILE=
REGEN__ETHERSCAN_FILE=
REGEN__GENESIS_FILE=
REGEN__OUTPUT_FILE=
REGEN__L2_PROVIDER_URL=
REGEN__ETH_PROVIDER_URL=
REGEN__L1_TESTNET_PROVIDER_URL=
REGEN__L1_TESTNET_PRIVATE_KEY=
REGEN__START_INDEX=
REGEN__END_INDEX=
module.exports = {
extends: '../../.eslintrc.js',
ignorePatterns: ['/data', '/solc-bin', '/solc-cache'],
}
node_modules/
build/
solc-bin/
outputs/
etherscan/
state-dumps/
data/
solc-cache/
module.exports = {
...require('../../.prettierrc.js'),
};
\ No newline at end of file
# @eth-optimism/regenesis-surgery
## 0.2.3
### Patch Changes
- ba14c59d: Updates various ethers dependencies to their latest versions
## 0.2.2
### Patch Changes
- 8e634b49: Fix package JSON issues
## 0.2.1
### Patch Changes
- 243f33e5: Standardize package json file format
## 0.2.0
### Minor Changes
- 8148d2fb: Add regenesis-surgery package and event-indexer script
- 81ccd6e4: `regenesis/0.5.0` release
### Patch Changes
- f9ea95bd: Fixes the compiler cache to prevent collisions between EVM and OVM outputs.
- b70ee70c: upgraded to solidity 0.8.9
- c38e4b57: Minor bugfixes to the regenesis process for OVM_ETH
- a98a1884: Fixes dependencies instead of using caret constraints
(The MIT License)
Copyright 2020-2021 Optimism
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# @eth-optimism/regenesis-surgery
Scripts used to perform the transition process between OVMv1 and OVMv2.
## Installation
```sh
git clone git@github.com:ethereum-optimism/optimism.git
yarn clean
yarn install
yarn build
```
## Usage
1. Open `.env` and add values for all environment variables listed below.
2. Run `yarn start` to start the surgery process.
3. Grab a coffee or something.
## Environment Variables
| Variable | Description |
| ----------------------------- | --------------------------------------------------------------|
| `REGEN__STATE_DUMP_FILE` | Path to the state dump file |
| `REGEN__ETHERSCAN_FILE` | Path to the etherscan dump file |
| `REGEN__GENESIS_FILE` | Path to the initial genesis file |
| `REGEN__OUTPUT_FILE` | Path where the output genesis will be saved |
| `REGEN__L2_PROVIDER_URL` | RPC provider for the L2 network being upgraded |
| `REGEN__ETH_PROVIDER_URL` | RPC provider for Ethereum mainnet |
| `REGEN__ROPSTEN_PROVIDER_URL` | RPC provider for the Ropsten testnet |
| `REGEN__ROPSTEN_PRIVATE_KEY` | Private key of an account that has Ropsten ETH |
| `REGEN__STATE_DUMP_HEIGHT` | Height at which the state dump was taken |
| `REGEN__START_INDEX` | Start index to begin processing the regenesis at (do not set) |
| `REGEN__END_INDEX` | End index to finish processing the regenesis at (do not set) |
{
"private": true,
"name": "@eth-optimism/regenesis-surgery",
"version": "0.2.3",
"description": "[Optimism] Tooling for managing the OVM 1.0 to OVM 2.0 upgrade",
"main": "dist/index",
"types": "dist/index",
"files": [
"dist/*"
],
"scripts": {
"clean": "rimraf ./dist ./tsconfig.build.tsbuildinfo",
"lint": "yarn run lint:fix && yarn run lint:check",
"lint:fix": "yarn lint:check --fix",
"lint:check": "eslint . --max-warnings=0",
"pre-commit": "lint-staged",
"test:surgery": "ts-mocha --timeout 50000000 test/*",
"start": "ts-node ./scripts/surgery.ts"
},
"keywords": [
"optimism",
"ethereum",
"regenesis",
"surgery"
],
"homepage": "https://github.com/ethereum-optimism/optimism/tree/develop/packages/regenesis-surgery#readme",
"license": "MIT",
"author": "Optimism PBC",
"repository": {
"type": "git",
"url": "https://github.com/ethereum-optimism/optimism.git"
},
"devDependencies": {
"@discoveryjs/json-ext": "^0.5.3",
"@eth-optimism/core-utils": "0.7.6",
"@ethersproject/abi": "^5.5.0",
"@ethersproject/abstract-provider": "^5.5.1",
"@ethersproject/bignumber": "^5.5.0",
"@ethersproject/properties": "^5.5.0",
"@ethersproject/providers": "^5.5.3",
"@types/node": "^15.12.2",
"@types/node-fetch": "^3.0.3",
"@typescript-eslint/eslint-plugin": "^4.26.0",
"@typescript-eslint/parser": "^4.26.0",
"@uniswap/sdk-core": "^3.0.1",
"@uniswap/v3-core": "^1.0.0",
"@uniswap/v3-sdk": "^3.5.1",
"babel-eslint": "^10.1.0",
"byline": "^5.0.0",
"chai": "^4.3.4",
"chai-as-promised": "^7.1.1",
"dotenv": "^10.0.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-import": "^2.23.4",
"eslint-plugin-jsdoc": "^35.1.2",
"eslint-plugin-prefer-arrow": "^1.2.3",
"eslint-plugin-prettier": "^3.4.0",
"eslint-plugin-react": "^7.24.0",
"eslint-plugin-unicorn": "^32.0.1",
"ethereum-waffle": "^3.4.0",
"ethereumjs-util": "^7.1.3",
"ethers": "^5.5.4",
"lint-staged": "11.0.0",
"mocha": "^9.1.2",
"node-fetch": "2.6.7",
"solc": "0.8.7-fixed",
"ts-mocha": "^8.0.0",
"ts-node": "^10.0.0"
}
}
import {
EOA_CODE_HASHES,
UNISWAP_V3_FACTORY_ADDRESS,
UNISWAP_V3_NFPM_ADDRESS,
UNISWAP_V3_CONTRACT_ADDRESSES,
UNISWAP_V3_MAINNET_MULTICALL,
PREDEPLOY_WIPE_ADDRESSES,
PREDEPLOY_NO_WIPE_ADDRESSES,
PREDEPLOY_NEW_NOT_ETH_ADDRESSES,
OLD_ETH_ADDRESS,
NEW_ETH_ADDRESS,
ONEINCH_DEPLOYER_ADDRESS,
DELETE_CONTRACTS,
} from './constants'
import { Account, AccountType, SurgeryDataSources } from './types'
import { hexStringEqual, isBytecodeERC20 } from './utils'
/**
 * Predicate table mapping each AccountType to a function that decides whether
 * a given account belongs to that type. `classify` walks these in numeric
 * enum order, so earlier entries take priority over later ones.
 */
export const classifiers: {
  [key in AccountType]: (account: Account, data: SurgeryDataSources) => boolean
} = {
  // Matches only the 1inch deployer account.
  [AccountType.ONEINCH_DEPLOYER]: (account) => {
    return hexStringEqual(account.address, ONEINCH_DEPLOYER_ADDRESS)
  },
  // Contracts that are removed entirely from the new genesis.
  [AccountType.DELETE]: (account) => {
    return DELETE_CONTRACTS.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  [AccountType.EOA]: (account) => {
    // Just in case the account doesn't have a code hash
    if (!account.codeHash) {
      return false
    }
    // OVM 1.0 EOAs are proxy contracts; match on the known code hashes.
    return EOA_CODE_HASHES.some((codeHash) => {
      return hexStringEqual(account.codeHash, codeHash)
    })
  },
  // Addresses 0x00...0000 through 0x00...00ff count as precompiles.
  [AccountType.PRECOMPILE]: (account) => {
    return account.address
      .toLowerCase()
      .startsWith('0x00000000000000000000000000000000000000')
  },
  [AccountType.PREDEPLOY_NEW_NOT_ETH]: (account) => {
    return PREDEPLOY_NEW_NOT_ETH_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  // Predeploys whose storage is wiped during the surgery.
  [AccountType.PREDEPLOY_WIPE]: (account) => {
    return PREDEPLOY_WIPE_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  // Predeploys whose storage is carried over as-is.
  [AccountType.PREDEPLOY_NO_WIPE]: (account) => {
    return PREDEPLOY_NO_WIPE_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  [AccountType.PREDEPLOY_ETH]: (account) => {
    return hexStringEqual(account.address, NEW_ETH_ADDRESS)
  },
  [AccountType.PREDEPLOY_WETH]: (account) => {
    return hexStringEqual(account.address, OLD_ETH_ADDRESS)
  },
  [AccountType.UNISWAP_V3_FACTORY]: (account) => {
    return hexStringEqual(account.address, UNISWAP_V3_FACTORY_ADDRESS)
  },
  [AccountType.UNISWAP_V3_NFPM]: (account) => {
    return hexStringEqual(account.address, UNISWAP_V3_NFPM_ADDRESS)
  },
  [AccountType.UNISWAP_V3_MAINNET_MULTICALL]: (account) => {
    return hexStringEqual(account.address, UNISWAP_V3_MAINNET_MULTICALL)
  },
  // Pools are matched against addresses derived from PoolCreated events.
  [AccountType.UNISWAP_V3_POOL]: (account, data) => {
    return data.pools.some((pool) => {
      return hexStringEqual(pool.oldAddress, account.address)
    })
  },
  [AccountType.UNISWAP_V3_OTHER]: (account) => {
    return UNISWAP_V3_CONTRACT_ADDRESSES.some((addr) => {
      return hexStringEqual(account.address, addr)
    })
  },
  [AccountType.UNVERIFIED]: (account, data) => {
    // NOTE(review): this uses a case-sensitive string comparison while every
    // other address comparison in this file goes through hexStringEqual —
    // confirm the etherscan dump and state dump use identical address casing.
    const found = data.etherscanDump.find(
      (c) => c.contractAddress === account.address
    )
    return found === undefined || found.sourceCode === ''
  },
  // Exact complement of UNVERIFIED.
  [AccountType.VERIFIED]: (account, data) => {
    return !classifiers[AccountType.UNVERIFIED](account, data)
  },
  // NOTE(review): because UNVERIFIED/VERIFIED are complements and precede
  // this entry, `classify` can never return ERC20 — presumably this predicate
  // is only used via direct lookup. Confirm against callers.
  [AccountType.ERC20]: (account) => {
    return isBytecodeERC20(account.code)
  },
}
/**
 * Determines the AccountType of a given account by testing each classifier
 * in ascending enum order and returning the first one that matches.
 */
export const classify = (
  account: Account,
  data: SurgeryDataSources
): AccountType => {
  // TypeScript numeric enums also contain reverse (name -> value) mappings,
  // so restrict iteration to the numeric member keys.
  const numericTypes = Object.keys(AccountType).filter((key) => {
    return !isNaN(Number(key))
  })
  for (const accountType of numericTypes) {
    if (classifiers[accountType](account, data)) {
      return Number(accountType)
    }
  }
}
import path from 'path'
// Codehashes of OVM_ECDSAContractAccount for 0.3.0 and 0.4.0
export const EOA_CODE_HASHES = [
  '0xa73df79c90ba2496f3440188807022bed5c7e2e826b596d22bcb4e127378835a',
  '0xef2ab076db773ffc554c9f287134123439a5228e92f5b3194a28fec0a0afafe3',
]
// Uniswap V3 factory, deployed at the same address on every network.
export const UNISWAP_V3_FACTORY_ADDRESS =
  '0x1F98431c8aD98523631AE4a59f267346ea31F984'
// Uniswap V3 NonfungiblePositionManager.
export const UNISWAP_V3_NFPM_ADDRESS =
  '0xC36442b4a4522E871399CD717aBDD847Ab11FE88'
// All known Uniswap V3 periphery/core contract addresses on this network.
export const UNISWAP_V3_CONTRACT_ADDRESSES = [
  // PoolDeployer
  '0x569E8D536EC2dD5988857147c9FCC7d8a08a7DBc',
  // UniswapV3Factory
  '0x1F98431c8aD98523631AE4a59f267346ea31F984',
  // ProxyAdmin
  '0xB753548F6E010e7e680BA186F9Ca1BdAB2E90cf2',
  // TickLens
  '0xbfd8137f7d1516D3ea5cA83523914859ec47F573',
  // Quoter
  '0xb27308f9F90D607463bb33eA1BeBb41C27CE5AB6',
  // SwapRouter
  '0xE592427A0AEce92De3Edee1F18E0157C05861564',
  // NonfungiblePositionLibrary
  '0x42B24A95702b9986e82d421cC3568932790A48Ec',
  // NonfungibleTokenPositionDescriptor
  '0x91ae842A5Ffd8d12023116943e72A606179294f3',
  // TransparentUpgradeableProxy
  '0xEe6A57eC80ea46401049E92587E52f5Ec1c24785',
  // NonfungibleTokenPositionManager
  '0xC36442b4a4522E871399CD717aBDD847Ab11FE88',
  // UniswapInterfaceMulticall (OP KOVAN)
  '0x1F98415757620B543A52E61c46B32eB19261F984',
]
// UniswapInterfaceMulticall addresses differ per network.
export const UNISWAP_V3_KOVAN_MULTICALL =
  '0x1F98415757620B543A52E61c46B32eB19261F984'
export const UNISWAP_V3_MAINNET_MULTICALL =
  '0x90f872b3d8f33f305e0250db6A2761B354f7710A'
// Predeploys whose storage is wiped during the surgery (code is replaced by
// the new genesis versions).
export const PREDEPLOY_WIPE_ADDRESSES = [
  // L2CrossDomainMessenger
  '0x4200000000000000000000000000000000000007',
  // OVM_GasPriceOracle
  '0x420000000000000000000000000000000000000F',
  // L2StandardBridge
  '0x4200000000000000000000000000000000000010',
  // OVM_SequencerFeeVault
  '0x4200000000000000000000000000000000000011',
]
// Predeploys whose storage is preserved across the surgery.
export const PREDEPLOY_NO_WIPE_ADDRESSES = [
  // OVM_DeployerWhitelist
  '0x4200000000000000000000000000000000000002',
  // OVM_L2ToL1MessagePasser
  '0x4200000000000000000000000000000000000000',
]
// Predeploys that are new in the OVM 2.0 genesis (excluding OVM_ETH).
export const PREDEPLOY_NEW_NOT_ETH_ADDRESSES = [
  // L2StandardTokenFactory
  '0x4200000000000000000000000000000000000012',
  // OVM_L1BlockNumber
  '0x4200000000000000000000000000000000000013',
]
// OVM 1.0 placed ETH (as WETH) at the 0x42...06 predeploy; OVM 2.0 moves the
// ERC20 representation to the 0xDeaD...0000 address.
export const OLD_ETH_ADDRESS = '0x4200000000000000000000000000000000000006'
export const NEW_ETH_ADDRESS = '0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000'
export const ONEINCH_DEPLOYER_ADDRESS =
  '0xee4f7b6c39e7e87af01fb9e4cee0c893ff4d63f2'
// Contracts dropped entirely from the new genesis.
export const DELETE_CONTRACTS = [
  // 1inch aggregator
  '0x11111112542D85B3EF69AE05771c2dCCff4fAa26',
  // OVM_L1MessageSender
  '0x4200000000000000000000000000000000000001',
  // OVM v1 System Contract
  '0xDEADDEaDDeAddEADDeaDDEADdeaDdeAddeAd0005',
  // OVM v1 System Contract
  '0xDEADdeAdDeAddEAdDEaDdEaddEAddeaDdEaD0006',
  // OVM v1 System Contract
  '0xDeaDDeaDDeaddEADdeaDdEadDeaDdeADDEad0007',
  // Uniswap Position
  '0x18F7E3ae7202e93984290e1195810c66e1E276FF',
  // Uniswap Oracle
  '0x17b0f5e5850e7230136df66c5d49497b8c3be0c1',
  // Uniswap Tick
  '0x47405b0d5f88e16701be6dc8ae185fefaa5dca2f',
  // Uniswap TickBitmap
  '0x01d95165c3c730d6b40f55c37e24c7aac73d5e6f',
  // Uniswap TickMath
  '0x308c3e60585ad4eab5b7677be0566fead4cb4746',
  // Uniswap SwapMath
  '0x198dcc7cd919dd33dd72c3f981df653750901d75',
  // Uniswap UniswapV3PoolDeployer
  '0x569e8d536ec2dd5988857147c9fcc7d8a08a7dbc',
  // Uniswap NFTDescriptor
  '0x042f51014b152c2d2fc9b57e36b16bc744065d8c',
]
// Accounts whose WETH balances are migrated as part of the surgery.
export const WETH_TRANSFER_ADDRESSES = [
  // Rubicon Mainnet bathETH
  '0xB0bE5d911E3BD4Ee2A8706cF1fAc8d767A550497',
  // Rubicon Mainnet bathETH-USDC
  '0x87a7Eed69eaFA78D30344001D0baFF99FC005Dc8',
  // Rubicon Mainnet bathETH-DAI
  '0x314eC4Beaa694264746e1ae324A5edB913a6F7C6',
  // Rubicon Mainnet bathETH-USDT
  '0xF6A47B24e80D12Ac7d3b5Cef67B912BCd3377333',
  // Rubicon Mainnet exchange
  '0x7a512d3609211e719737E82c7bb7271eC05Da70d',
  // Rubicon Mainnet bathUSDC
  '0xe0e112e8f33d3f437D1F895cbb1A456836125952',
  // Rubicon Mainnet bathDAI
  '0x60daEC2Fc9d2e0de0577A5C708BcaDBA1458A833',
  // Rubicon Mainnet bathUSDT
  '0xfFBD695bf246c514110f5DAe3Fa88B8c2f42c411',
  // Rubicon Kovan bathETH
  '0x5790AedddfB25663f7dd58261De8E96274A82BAd',
  // Rubicon Kovan bathETH-USDC
  '0x52fBa53c876a47a64A10F111fbeA7Ed506dCc7e7',
  // Rubicon Kovan bathETH-DAI
  '0xA92E4Bd9f61e90757Cd8806D236580698Fc20C91',
  // Rubicon Kovan bathETH-USDT
  '0x80D94a6f6b0335Bfed8D04B92423B6Cd14b5d31C',
  // Rubicon Kovan market
  '0x5ddDa7DF721272106af1904abcc64E76AB2019d2',
  // Hop Mainnet AMM Wrapper
  '0x86cA30bEF97fB651b8d866D45503684b90cb3312',
  // Hop MainnetSwap
  '0xaa30d6bba6285d0585722e2440ff89e23ef68864',
  // Hop Kovan AMM Wrapper
  '0xc9E6628791cdD4ad568550fcc6f378cEF27e98fd',
  // Hop Kovan Swap
  '0xD6E31cE884DFf44c4600fD9D36BcC9af447C28d5',
  // Synthetix Mainnet WETHWrapper
  '0x6202a3b0be1d222971e93aab084c6e584c29db70',
]
// Maps OVM compiler version tags (as recorded by etherscan) to the standard
// solc release that should be used for EVM recompilation.
export const COMPILER_VERSIONS_TO_SOLC = {
  'v0.5.16': 'v0.5.16+commit.9c3226ce',
  'v0.5.16-alpha.7': 'v0.5.16+commit.9c3226ce',
  'v0.6.12': 'v0.6.12+commit.27d51765',
  'v0.7.6': 'v0.7.6+commit.7338295f',
  'v0.7.6+commit.3b061308': 'v0.7.6+commit.7338295f',
  'v0.7.6-allow_kall': 'v0.7.6+commit.7338295f',
  'v0.7.6-no_errors': 'v0.7.6+commit.7338295f',
  'v0.8.4': 'v0.8.4+commit.c7e474f2',
}
// Official solidity binary distribution endpoints.
export const SOLC_BIN_PATH = 'https://binaries.soliditylang.org'
export const EMSCRIPTEN_BUILD_PATH = `${SOLC_BIN_PATH}/emscripten-wasm32`
export const EMSCRIPTEN_BUILD_LIST = `${EMSCRIPTEN_BUILD_PATH}/list.json`
// Local directories for downloaded compilers and compilation output caches.
export const LOCAL_SOLC_DIR = path.join(__dirname, '..', 'solc-bin')
export const EVM_SOLC_CACHE_DIR = path.join(
  __dirname,
  '..',
  'solc-cache',
  'evm'
)
export const OVM_SOLC_CACHE_DIR = path.join(
  __dirname,
  '..',
  'solc-cache',
  'ovm'
)
import { ethers } from 'ethers'
import {
computePoolAddress,
POOL_INIT_CODE_HASH,
POOL_INIT_CODE_HASH_OPTIMISM,
POOL_INIT_CODE_HASH_OPTIMISM_KOVAN,
} from '@uniswap/v3-sdk'
import { Token } from '@uniswap/sdk-core'
import { UNISWAP_V3_FACTORY_ADDRESS } from './constants'
import { downloadAllSolcVersions } from './solc'
import {
PoolHashCache,
StateDump,
UniswapPoolData,
SurgeryDataSources,
EtherscanContract,
SurgeryConfigs,
GenesisFile,
} from './types'
import {
loadConfigs,
checkStateDump,
readDumpFile,
readEtherscanFile,
readGenesisFile,
getUniswapV3Factory,
getMappingKey,
} from './utils'
export const getUniswapPoolData = async (
l2Provider: ethers.providers.BaseProvider,
network: 'mainnet' | 'kovan'
): Promise<UniswapPoolData[]> => {
if (!network) {
throw new Error('Must provide network "mainnet" or "kovan"')
}
const UniswapV3Factory = getUniswapV3Factory(l2Provider)
const pools: UniswapPoolData[] = []
const poolEvents = await UniswapV3Factory.queryFilter('PoolCreated' as any)
for (const event of poolEvents) {
// Compute the old pool address using the OVM init code hash.
const oldPoolAddress = computePoolAddress({
factoryAddress: UNISWAP_V3_FACTORY_ADDRESS,
tokenA: new Token(0, event.args.token0, 18),
tokenB: new Token(0, event.args.token1, 18),
fee: event.args.fee,
initCodeHashManualOverride:
network === 'mainnet'
? POOL_INIT_CODE_HASH_OPTIMISM
: POOL_INIT_CODE_HASH_OPTIMISM_KOVAN,
}).toLowerCase()
// Compute the new pool address using the EVM init code hash.
const newPoolAddress = computePoolAddress({
factoryAddress: UNISWAP_V3_FACTORY_ADDRESS,
tokenA: new Token(0, event.args.token0, 18),
tokenB: new Token(0, event.args.token1, 18),
fee: event.args.fee,
initCodeHashManualOverride: POOL_INIT_CODE_HASH,
}).toLowerCase()
pools.push({
oldAddress: oldPoolAddress,
newAddress: newPoolAddress,
token0: event.args.token0,
token1: event.args.token1,
fee: event.args.fee,
})
}
return pools
}
/**
 * Precomputes the mapping-slot keys for indices 0..999 of every pool's old
 * address so that storage-slot lookups during surgery become O(1).
 */
export const makePoolHashCache = (pools: UniswapPoolData[]): PoolHashCache => {
  const cache: PoolHashCache = {}
  for (const pool of pools) {
    for (let index = 0; index < 1000; index++) {
      cache[getMappingKey([pool.oldAddress], index)] = { pool, index }
    }
  }
  return cache
}
/**
 * Queries the connected node for its chain id via raw RPC.
 * eth_chainId returns a hex quantity, so normalize it to a plain number.
 */
const getChainId = async (
  provider: ethers.providers.JsonRpcProvider
): Promise<number> => {
  const hexChainId = await provider.send('eth_chainId', [])
  return ethers.BigNumber.from(hexChainId).toNumber()
}
/**
 * Loads every data source required to perform the regenesis surgery:
 * configuration, the state dump, the genesis file, the etherscan dump,
 * Uniswap pool data, and the ETH/L2/ropsten providers. Validates the chain
 * ids of all connected nodes before reading large files from disk.
 *
 * @param configs Optional pre-loaded configuration; read from env if omitted.
 * @returns All surgery data sources bundled together.
 */
export const loadSurgeryData = async (
  configs?: SurgeryConfigs
): Promise<SurgeryDataSources> => {
  // First download every solc version that we'll need during this surgery.
  console.log('Downloading all required solc versions...')
  await downloadAllSolcVersions()
  // Load the configuration values, will throw if anything is missing.
  if (configs === undefined) {
    console.log('Loading configuration values...')
    configs = loadConfigs()
  }
  // Get a reference to an ETH (mainnet) provider.
  console.log('Connecting to ETH provider...')
  const ethProvider = new ethers.providers.JsonRpcProvider(
    configs.ethProviderUrl
  )
  const mainnetChainId = await getChainId(ethProvider)
  if (mainnetChainId !== 1) {
    throw new Error(
      `Mainnet chain id incorrect, got ${mainnetChainId} and expected 1`
    )
  }
  // Get a reference to the L2 provider so we can load pool data.
  // Do validation on the chain id before reading data from disk
  // because that is slow
  console.log('Connecting to L2 provider...')
  const l2Provider = new ethers.providers.JsonRpcProvider(configs.l2ProviderUrl)
  const l2ChainId = await getChainId(l2Provider)
  // The network name is derived from the chain id rather than configured.
  if (l2ChainId === 10) {
    configs.l2NetworkName = 'mainnet'
  } else if (l2ChainId === 69) {
    configs.l2NetworkName = 'kovan'
  } else {
    throw new Error(`Unknown l2 chain id: ${l2ChainId}`)
  }
  console.log(`Using network ${configs.l2NetworkName}`)
  // Load and validate the state dump.
  console.log('Loading and validating state dump file...')
  const dump: StateDump = await readDumpFile(configs.stateDumpFilePath)
  checkStateDump(dump)
  console.log(`${dump.length} entries in state dump`)
  // Load the genesis file.
  console.log('Loading genesis file...')
  const genesis: GenesisFile = await readGenesisFile(configs.genesisFilePath)
  if (genesis.config.chainId !== l2ChainId) {
    // Don't throw here because we might need to do a staging environment with a different chain ID
    console.log(
      `WARNING: Genesis File at ${configs.genesisFilePath} has chain id mismatch with remote L2 node` +
        ` got ${genesis.config.chainId} locally and ${l2ChainId} remotely`
    )
  }
  // Flatten the genesis alloc into the same Account[] shape as the dump.
  const genesisDump: StateDump = []
  for (const [address, account] of Object.entries(genesis.alloc)) {
    genesisDump.push({
      address,
      ...account,
    })
  }
  console.log(`${genesisDump.length} entries in genesis file`)
  // Load the etherscan dump.
  console.log('Loading etherscan dump file...')
  const etherscanDump: EtherscanContract[] = await readEtherscanFile(
    configs.etherscanFilePath
  )
  console.log(`${etherscanDump.length} entries in etherscan dump`)
  // Load the pool data.
  console.log('Loading Uniswap pool data...')
  const pools: UniswapPoolData[] = await getUniswapPoolData(
    l2Provider,
    configs.l2NetworkName
  )
  console.log(`${pools.length} uniswap pools`)
  console.log('Generating pool cache...')
  const poolHashCache = makePoolHashCache(pools)
  // Get a reference to the ropsten provider and wallet, used for deploying Uniswap pools.
  console.log('Connecting to ropsten provider...')
  const ropstenProvider = new ethers.providers.JsonRpcProvider(
    configs.ropstenProviderUrl
  )
  const ropstenWallet = new ethers.Wallet(
    configs.ropstenPrivateKey,
    ropstenProvider
  )
  const ropstenChainId = await ropstenWallet.getChainId()
  if (ropstenChainId !== 3) {
    throw new Error(
      `Ropsten chain id incorrect, got ${ropstenChainId} and expected 3`
    )
  }
  return {
    configs,
    dump,
    genesis,
    genesisDump,
    pools,
    poolHashCache,
    etherscanDump,
    ropstenProvider,
    ropstenWallet,
    l2Provider,
    ethProvider,
  }
}
This diff is collapsed.
/* eslint @typescript-eslint/no-var-requires: "off" */
import fs from 'fs'
import path from 'path'
import fetch from 'node-fetch'
import { ethers } from 'ethers'
import { clone } from '@eth-optimism/core-utils'
import setupMethods from 'solc/wrapper'
import {
COMPILER_VERSIONS_TO_SOLC,
EMSCRIPTEN_BUILD_LIST,
EMSCRIPTEN_BUILD_PATH,
LOCAL_SOLC_DIR,
EVM_SOLC_CACHE_DIR,
OVM_SOLC_CACHE_DIR,
} from './constants'
import { EtherscanContract } from './types'
// URL of an OVM compiler build in the ethereum-optimism/solc-bin repository,
// pinned to a specific commit so downloads are reproducible.
const OVM_BUILD_PATH = (version: string) => {
  return `https://raw.githubusercontent.com/ethereum-optimism/solc-bin/9455107699d2f7ad9b09e1005c7c07f4b5dd6857/bin/soljson-${version}.js`
}
/**
 * Downloads a specific solc version into LOCAL_SOLC_DIR, skipping the
 * download when the file is already present on disk.
 *
 * @param version Solc version to download.
 * @param ovm If true, downloads from the OVM repository.
 * @throws If the version cannot be found or an HTTP request fails. Checking
 * `res.ok` prevents an HTTP error page from being written to disk as a
 * compiler binary.
 */
export const downloadSolc = async (version: string, ovm?: boolean) => {
  // TODO: why is this one missing?
  if (version === 'v0.5.16-alpha.7') {
    return
  }
  // File is the location where we'll put the downloaded compiler.
  let file: string
  // Remote is the URL we'll query if the file doesn't already exist.
  let remote: string
  // Exact file/remote will depend on if downloading OVM or EVM compiler.
  if (ovm) {
    file = `${path.join(LOCAL_SOLC_DIR, version)}.js`
    remote = OVM_BUILD_PATH(version)
  } else {
    const res = await fetch(EMSCRIPTEN_BUILD_LIST)
    if (!res.ok) {
      throw new Error(
        `Unable to fetch solc build list: ${res.status} ${res.statusText}`
      )
    }
    const data: any = await res.json()
    const list = data.builds
    // Make sure the target version actually exists
    let target: any
    for (const entry of list) {
      const longVersion = `v${entry.longVersion}`
      if (version === longVersion) {
        target = entry
      }
    }
    // Error out if the given version can't be found
    if (!target) {
      throw new Error(`Cannot find compiler version ${version}`)
    }
    file = path.join(LOCAL_SOLC_DIR, target.path)
    remote = `${EMSCRIPTEN_BUILD_PATH}/${target.path}`
  }
  try {
    // Check to see if we already have the file
    fs.accessSync(file, fs.constants.F_OK)
  } catch (e) {
    console.error(`Downloading ${version} ${ovm ? 'ovm' : 'solidity'}`)
    // If we don't have the file, download it
    const res = await fetch(remote)
    if (!res.ok) {
      throw new Error(
        `Unable to download solc ${version}: ${res.status} ${res.statusText}`
      )
    }
    const bin = await res.text()
    fs.writeFileSync(file, bin)
  }
}
/**
 * Downloads all required solc versions, if not already downloaded.
 * OVM versions come from the keys of COMPILER_VERSIONS_TO_SOLC; EVM versions
 * come from its values (which repeat, hence the Set-based dedupe).
 */
export const downloadAllSolcVersions = async () => {
  try {
    fs.mkdirSync(LOCAL_SOLC_DIR)
  } catch (e) {
    // directory already exists
  }
  // Downloads a deduplicated list of versions in parallel.
  const downloadAll = async (versions: string[], ovm?: boolean) => {
    await Promise.all(
      [...new Set(versions)].map((version) => downloadSolc(version, ovm))
    )
  }
  await downloadAll(Object.keys(COMPILER_VERSIONS_TO_SOLC), true)
  await downloadAll(Object.values(COMPILER_VERSIONS_TO_SOLC))
}
/**
 * Extracts the compiled output for the main contract from a solc output
 * object. Etherscan sometimes records the file the contract lives in;
 * otherwise the synthetic 'file' key used by solcInput is assumed.
 */
export const getMainContract = (contract: EtherscanContract, output) => {
  const fileName = contract.contractFileName ? contract.contractFileName : 'file'
  return clone(output.contracts[fileName][contract.contractName])
}
/**
 * Loads a previously-downloaded solc build and wraps it with the solc JS
 * bindings. OVM builds are stored under their bare version name; EVM builds
 * keep the emscripten artifact naming from the official binary list.
 */
export const getSolc = (version: string, ovm?: boolean) => {
  const fileName = ovm ? version : `solc-emscripten-wasm32-${version}.js`
  return setupMethods(require(path.join(LOCAL_SOLC_DIR, fileName)))
}
/**
 * Builds a solc standard-JSON input object for an etherscan contract record.
 *
 * The `sourceCode` field from etherscan may be one of three things:
 * - a raw Solidity content string,
 * - a JSON "sources" object,
 * - an entire solc standard-JSON input (detected by a `language` key).
 * The last form is returned as-is; the middle form replaces `input.sources`;
 * a raw string is wrapped under the synthetic 'file' key.
 *
 * @param contract Etherscan contract record to build the input for.
 * @returns A solc standard-JSON input object.
 */
export const solcInput = (contract: EtherscanContract) => {
  // Create a base solc input object
  const input = {
    language: 'Solidity',
    sources: {
      file: {
        content: contract.sourceCode,
      },
    },
    settings: {
      outputSelection: {
        '*': {
          '*': ['*'],
        },
      },
      optimizer: {
        enabled: contract.optimizationUsed === '1',
        runs: parseInt(contract.runs, 10),
      },
    },
  }
  try {
    let sourceCode = contract.sourceCode
    // Etherscan wraps standard-JSON sources in an extra pair of brackets;
    // trim the first and last characters so the payload parses.
    if (sourceCode.startsWith('{{')) {
      sourceCode = sourceCode.slice(1, -1)
    }
    // If the source code is valid json, and
    // has the keys of a solc input, just return it
    const json = JSON.parse(sourceCode)
    // If the json has language, then it is the whole input
    if (json.language) {
      return json
    }
    // Add the json file as the sources
    input.sources = json
  } catch (e) {
    // Not JSON: treat sourceCode as raw content and use the base input.
  }
  return input
}
/**
 * Reads a cached compilation result for the given input hash. Any failure
 * (missing file, unreadable file, corrupt JSON) is treated as a cache miss.
 */
const readCompilerCache = (
  target: 'evm' | 'ovm',
  hash: string
): any | undefined => {
  const cacheDir = target === 'evm' ? EVM_SOLC_CACHE_DIR : OVM_SOLC_CACHE_DIR
  try {
    const raw = fs.readFileSync(path.join(cacheDir, hash), {
      encoding: 'utf-8',
    })
    return JSON.parse(raw)
  } catch (err) {
    return undefined
  }
}
/**
 * Persists a compilation result keyed by the solc input hash so repeated
 * compilations of the same contract can be skipped.
 */
const writeCompilerCache = (
  target: 'evm' | 'ovm',
  hash: string,
  content: any
) => {
  const cachePath = path.join(
    target === 'evm' ? EVM_SOLC_CACHE_DIR : OVM_SOLC_CACHE_DIR,
    hash
  )
  fs.writeFileSync(cachePath, JSON.stringify(content))
}
/**
 * Compiles an etherscan contract record with the appropriate solc build
 * (OVM or EVM) and returns the compiled output for the main contract.
 * Results are cached on disk keyed by a hash of the solc input.
 *
 * @param opts.contract Contract record to compile.
 * @param opts.ovm Whether to compile with the OVM compiler.
 * @throws If no solc mapping exists for the contract's compiler version, if
 * compilation produced no contracts, or if the main contract can't be found.
 */
export const compile = (opts: {
  contract: EtherscanContract
  ovm: boolean
}): any => {
  // mkdirSync with `recursive: true` is a no-op when the directory already
  // exists, so no try/catch is required here.
  fs.mkdirSync(EVM_SOLC_CACHE_DIR, {
    recursive: true,
  })
  fs.mkdirSync(OVM_SOLC_CACHE_DIR, {
    recursive: true,
  })
  // OVM compilations use the etherscan-recorded version directly; EVM
  // compilations map it to a standard solc release first.
  let version: string
  if (opts.ovm) {
    version = opts.contract.compilerVersion
  } else {
    version = COMPILER_VERSIONS_TO_SOLC[opts.contract.compilerVersion]
    if (!version) {
      throw new Error(
        `Unable to find solc version ${opts.contract.compilerVersion}`
      )
    }
  }
  const solcInstance = getSolc(version, opts.ovm)
  const input = JSON.stringify(solcInput(opts.contract))
  const inputHash = ethers.utils.solidityKeccak256(['string'], [input])
  const compilerTarget = opts.ovm ? 'ovm' : 'evm'
  // Cache the compiler output to speed up repeated compilations of the same
  // contract. If this cache is too memory intensive, then we could consider
  // only caching if the contract has been seen more than once.
  let output = readCompilerCache(compilerTarget, inputHash)
  if (output === undefined) {
    output = JSON.parse(solcInstance.compile(input))
    writeCompilerCache(compilerTarget, inputHash, output)
  }
  if (!output.contracts) {
    throw new Error(`Cannot compile ${opts.contract.contractAddress}`)
  }
  const mainOutput = getMainContract(opts.contract, output)
  if (!mainOutput) {
    throw new Error(
      `Contract filename mismatch: ${opts.contract.contractAddress}`
    )
  }
  return mainOutput
}
import fs from 'fs'
import { ethers } from 'ethers'
import { add0x, remove0x, clone } from '@eth-optimism/core-utils'
import { StateDump, SurgeryDataSources, AccountType } from './types'
import { findAccount } from './utils'
import { handlers } from './handlers'
import { classify } from './classifiers'
import { loadSurgeryData } from './data'
/**
 * Runs the surgery over the configured slice of the state dump, merging in
 * genesis-only accounts, routing each account through its AccountType
 * handler, and then normalizing the resulting dump into the minimal format
 * geth expects.
 *
 * @param data All surgery data sources.
 * @returns The processed state dump to embed in the final genesis file.
 */
const doGenesisSurgery = async (
  data: SurgeryDataSources
): Promise<StateDump> => {
  // We'll generate the final genesis file from this output.
  const output: StateDump = []
  // Handle each account in the state dump.
  const input = data.dump.slice(data.configs.startIndex, data.configs.endIndex)
  // Insert any accounts in the genesis that aren't already in the state dump.
  for (const account of data.genesisDump) {
    if (findAccount(input, account.address) === undefined) {
      input.push(account)
    }
  }
  for (const [i, account] of input.entries()) {
    const accountType = classify(account, data)
    console.log(
      `[${i}/${input.length}] ${AccountType[accountType]}: ${account.address}`
    )
    const handler = handlers[accountType]
    // Handlers may return undefined to drop the account entirely.
    const newAccount = await handler(clone(account), data)
    if (newAccount !== undefined) {
      output.push(newAccount)
    }
  }
  // Clean up and standardize the dump. Also performs a few tricks to reduce the overall size of
  // the state dump, which reduces bandwidth requirements.
  console.log('Cleaning up and standardizing dump format...')
  for (const account of output) {
    for (const [key, val] of Object.entries(account)) {
      // We want to be left with the following fields:
      // - balance
      // - nonce
      // - code
      // - storage (if necessary)
      if (key === 'storage') {
        if (Object.keys(account[key]).length === 0) {
          // We don't need storage if there are no storage values.
          delete account[key]
        } else {
          // We can remove 0x from storage keys and vals to save space.
          for (const [storageKey, storageVal] of Object.entries(account[key])) {
            delete account.storage[storageKey]
            account.storage[remove0x(storageKey)] = remove0x(storageVal)
          }
        }
      } else if (key === 'code') {
        // Code MUST start with 0x.
        account[key] = add0x(val)
      } else if (key === 'codeHash' || key === 'root') {
        // Neither of these fields are necessary. Geth will automatically generate them from the
        // code and storage.
        delete account[key]
      } else if (key === 'balance' || key === 'nonce') {
        // At this point we know that the input is either a string or a number. If it's a number,
        // we want to convert it into a string.
        let stripped = typeof val === 'number' ? val.toString(16) : val
        // Remove 0x so we can strip any leading zeros.
        stripped = remove0x(stripped)
        // We can further reduce our genesis size by removing leading zeros. We can even go as far
        // as removing the entire string because Geth appears to treat the empty string as 0.
        // (Previously this line contained a stray argument-less `.replace()`
        // call, which was a no-op; it has been removed.)
        stripped = stripped.replace(/^0+/, '')
        // We have to add 0x if the value is greater or equal to than 10 because Geth will throw an
        // error otherwise.
        if (stripped !== '' && ethers.BigNumber.from(add0x(stripped)).gte(10)) {
          stripped = add0x(stripped)
        }
        account[key] = stripped
      } else if (key === 'address') {
        // Keep the address as-is, we'll delete it eventually.
      } else {
        throw new Error(`unexpected account field: ${key}`)
      }
    }
  }
  return output
}
/**
 * Entry point: loads all surgery data, runs the surgery, converts the result
 * into geth's genesis `alloc` format, and writes it to the configured output
 * path.
 */
const main = async () => {
  // Load the surgery data.
  const data = await loadSurgeryData()
  // Do the surgery process and get the new genesis dump.
  console.log('Starting surgery process...')
  const finalGenesisDump = await doGenesisSurgery(data)
  // Convert to the format that Geth expects.
  console.log('Converting dump to final format...')
  const finalGenesisAlloc = {}
  for (const account of finalGenesisDump) {
    // Addresses become the alloc keys (without 0x), not account fields.
    const address = account.address
    delete account.address
    finalGenesisAlloc[remove0x(address)] = account
  }
  // Attach all of the original genesis configuration values.
  const finalGenesis = {
    ...data.genesis,
    alloc: finalGenesisAlloc,
  }
  // Write the final genesis file to disk.
  console.log('Writing final genesis to disk...')
  fs.writeFileSync(
    data.configs.outputFilePath,
    JSON.stringify(finalGenesis, null, 2)
  )
  console.log('All done!')
}

// Surface failures explicitly: without this, a rejected promise would only
// produce an unhandled-rejection warning and a zero exit code.
main().catch((err) => {
  console.error(err)
  process.exit(1)
})
import { ethers } from 'ethers'
// Runtime configuration for the surgery scripts, populated from environment
// variables by `loadConfigs`.
export interface SurgeryConfigs {
  stateDumpFilePath: string
  etherscanFilePath: string
  genesisFilePath: string
  outputFilePath: string
  // Resolved automatically from the connected L2 node's chain id.
  l2NetworkName?: SupportedNetworks
  l2ProviderUrl: string
  ropstenProviderUrl: string
  ropstenPrivateKey: string
  ethProviderUrl: string
  stateDumpHeight: number
  // Slice bounds applied to the state dump (endIndex may be Infinity).
  startIndex: number
  endIndex: number
}
// A single account entry from the state dump.
export interface Account {
  address: string
  nonce: number | string
  balance: string
  codeHash?: string
  root?: string
  code?: string
  storage?: {
    [key: string]: string
  }
}
export type StateDump = Account[]
// Shape of the `alloc` section of a geth genesis file: account records keyed
// by address.
export interface GethStateDump {
  [address: string]: {
    nonce: number
    balance: string
    codeHash: string
    root: string
    code?: string
    storage?: {
      [key: string]: string
    }
  }
}
// Classification of every account in the dump. Declaration order matters:
// `classify` tests the classifiers top to bottom, so earlier members take
// priority over later ones.
export enum AccountType {
  ONEINCH_DEPLOYER,
  DELETE,
  EOA,
  PRECOMPILE,
  PREDEPLOY_NEW_NOT_ETH,
  PREDEPLOY_WIPE,
  PREDEPLOY_NO_WIPE,
  PREDEPLOY_ETH,
  PREDEPLOY_WETH,
  UNISWAP_V3_FACTORY,
  UNISWAP_V3_NFPM,
  UNISWAP_V3_MAINNET_MULTICALL,
  UNISWAP_V3_POOL,
  UNISWAP_V3_OTHER,
  UNVERIFIED,
  VERIFIED,
  ERC20,
}
// Old (OVM) and new (EVM) addresses for a Uniswap V3 pool, plus the token
// pair and fee tier that determine the pool's CREATE2 address.
export interface UniswapPoolData {
  oldAddress: string
  newAddress: string
  token0: string
  token1: string
  fee: ethers.BigNumber
}
// One verified-contract record from the etherscan dump.
export interface EtherscanContract {
  contractAddress: string
  code: string
  hash: string
  sourceCode: string
  creationCode: string
  contractFileName: string
  contractName: string
  compilerVersion: string
  optimizationUsed: string
  runs: string
  constructorArguments: string
  library: string
}
export type EtherscanDump = EtherscanContract[]
export type SupportedNetworks = 'mainnet' | 'kovan'
// Everything `loadSurgeryData` gathers; passed around as a single bundle.
export interface SurgeryDataSources {
  configs: SurgeryConfigs
  dump: StateDump
  genesis: GenesisFile
  genesisDump: StateDump
  pools: UniswapPoolData[]
  poolHashCache: PoolHashCache
  etherscanDump: EtherscanContract[]
  ropstenProvider: ethers.providers.JsonRpcProvider
  ropstenWallet: ethers.Wallet
  l2Provider: ethers.providers.JsonRpcProvider
  ethProvider: ethers.providers.JsonRpcProvider
}
// Parsed geth genesis file.
export interface GenesisFile {
  config: {
    chainId: number
    homesteadBlock: number
    eip150Block: number
    eip155Block: number
    eip158Block: number
    byzantiumBlock: number
    constantinopleBlock: number
    petersburgBlock: number
    istanbulBlock: number
    muirGlacierBlock: number
    clique: {
      period: number
      epoch: number
    }
  }
  difficulty: string
  gasLimit: string
  extraData: string
  alloc: GethStateDump
}
// Solidity immutable reference: byte offset and length within deployed
// bytecode.
export interface ImmutableReference {
  start: number
  length: number
}
export interface ImmutableReferences {
  [key: string]: ImmutableReference[]
}
// Precomputed mapping-slot keys for Uniswap pools (see makePoolHashCache).
export interface PoolHashCache {
  [key: string]: {
    pool: UniswapPoolData
    index: number
  }
}
/* eslint @typescript-eslint/no-var-requires: "off" */
import { createReadStream } from 'fs'
import * as fs from 'fs'
import * as assert from 'assert'
import { ethers } from 'ethers'
import { abi as UNISWAP_FACTORY_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Factory.sol/UniswapV3Factory.json'
import { Interface } from '@ethersproject/abi'
import { parseChunked } from '@discoveryjs/json-ext'
import byline from 'byline'
import * as dotenv from 'dotenv'
import { reqenv, getenv, remove0x } from '@eth-optimism/core-utils'
import {
Account,
EtherscanContract,
StateDump,
SurgeryConfigs,
GenesisFile,
} from './types'
import { UNISWAP_V3_FACTORY_ADDRESS } from './constants'
/**
 * Finds the account with the given address in a state dump, comparing
 * addresses case-insensitively. Returns undefined when no match exists.
 */
export const findAccount = (dump: StateDump, address: string): Account => {
  for (const acc of dump) {
    if (hexStringEqual(acc.address, address)) {
      return acc
    }
  }
}
/**
 * Case-insensitive substring test between two 0x-prefixed hex strings
 * (prefixes are stripped before comparing). Throws if either input is not a
 * valid hex string.
 */
export const hexStringIncludes = (a: string, b: string): boolean => {
  for (const s of [a, b]) {
    if (!ethers.utils.isHexString(s)) {
      throw new Error(`not a hex string: ${s}`)
    }
  }
  return a.slice(2).toLowerCase().includes(b.slice(2).toLowerCase())
}
/**
 * Case-insensitive equality test between two 0x-prefixed hex strings.
 * Throws if either input is not a valid hex string.
 */
export const hexStringEqual = (a: string, b: string): boolean => {
  for (const s of [a, b]) {
    if (!ethers.utils.isHexString(s)) {
      throw new Error(`not a hex string: ${s}`)
    }
  }
  return a.toLowerCase() === b.toLowerCase()
}
/**
 * Replaces every occurrence of the L1 WETH address body with the OVM_ETH
 * predeploy address body. Only lowercase matches are replaced, mirroring the
 * original regex.
 */
export const replaceWETH = (code: string): string => {
  return code
    .split('c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2')
    .join('4200000000000000000000000000000000000006')
}
/**
 * Left-pads a hex string with zeroes to 32 bytes.
 *
 * @param val Value to hex pad to 32 bytes.
 * @returns Value padded to 32 bytes.
 */
export const toHex32 = (val: string | number | ethers.BigNumber) => {
  const hexValue = ethers.BigNumber.from(val).toHexString()
  return ethers.utils.hexZeroPad(hexValue, 32)
}
/**
 * Moves a storage value from one slot to another within an account,
 * optionally overwriting the value at the destination slot. Numeric slots
 * are normalized to 32-byte hex strings; stored values are kept without a
 * 0x prefix. Unlike the previous version, the caller's `opts` object is no
 * longer mutated — normalization happens in locals.
 *
 * @param opts.account Account whose storage is modified (mutated in place).
 * @param opts.oldSlot Slot to move the value out of.
 * @param opts.newSlot Slot to move the value into.
 * @param opts.newValue Optional replacement value for the new slot.
 * @throws If the account has no storage or the old slot has no value.
 */
export const transferStorageSlot = (opts: {
  account: Account
  oldSlot: string | number
  newSlot: string | number
  newValue?: string
}): void => {
  const { account } = opts
  if (account.storage === undefined) {
    throw new Error(`account has no storage: ${account.address}`)
  }
  // Normalize numeric slots into 32-byte hex strings without mutating the
  // caller's options object.
  const oldSlot =
    typeof opts.oldSlot === 'string' ? opts.oldSlot : toHex32(opts.oldSlot)
  const newSlot =
    typeof opts.newSlot === 'string' ? opts.newSlot : toHex32(opts.newSlot)
  const oldSlotVal = account.storage[oldSlot]
  if (oldSlotVal === undefined) {
    throw new Error(
      `old slot not found in state dump, address=${account.address}, slot=${oldSlot}`
    )
  }
  if (opts.newValue === undefined) {
    account.storage[newSlot] = oldSlotVal
  } else {
    // Stored values are kept without the 0x prefix.
    account.storage[newSlot] = opts.newValue.startsWith('0x')
      ? opts.newValue.slice(2)
      : opts.newValue
  }
  delete account.storage[oldSlot]
}
/**
 * Computes the storage slot for a (possibly nested) Solidity mapping entry:
 * key = keccak256(kN . keccak256(... keccak256(k0 . slot))), with each
 * component padded to 32 bytes.
 *
 * @param keys Mapping keys, outermost first. Must be non-empty (this check
 * resolves the previous in-code TODO).
 * @param slot Declared storage slot of the mapping itself.
 * @returns The 32-byte storage slot key as a 0x-prefixed hex string.
 */
export const getMappingKey = (keys: any[], slot: number) => {
  if (keys.length === 0) {
    throw new Error('getMappingKey requires at least one key')
  }
  let key = ethers.utils.keccak256(
    ethers.utils.hexConcat([toHex32(keys[0]), toHex32(slot)])
  )
  // Nested mappings hash each additional key against the previous result.
  for (let i = 1; i < keys.length; i++) {
    key = ethers.utils.keccak256(ethers.utils.hexConcat([toHex32(keys[i]), key]))
  }
  return key
}
// ERC20 interface
const iface = new Interface([
  'function balanceOf(address)',
  'function name()',
  'function symbol()',
  'function decimals()',
  'function totalSupply()',
  'function transfer(address,uint256)',
])
// PUSH4 should prefix any 4 byte selector
const PUSH4 = 0x63
// Build the set of erc20 4 byte selectors
const erc20Sighashes = new Set(
  Object.keys(iface.functions).map((fn) => iface.getSighash(fn))
)
/**
 * Heuristic ERC20 detection: bytecode counts as ERC20 when every selector of
 * the minimal ERC20 interface appears immediately after a PUSH4 opcode.
 */
export const isBytecodeERC20 = (bytecode: string): boolean => {
  if (bytecode === '0x' || bytecode === undefined) {
    return false
  }
  const buf = Buffer.from(remove0x(bytecode), 'hex')
  // Collect every known ERC20 selector observed after a PUSH4 byte.
  const seen = new Set()
  for (let i = 0; i < buf.length; i++) {
    if (buf[i] === PUSH4) {
      const sighash = '0x' + buf.slice(i + 1, i + 5).toString('hex')
      if (erc20Sighashes.has(sighash)) {
        seen.add(sighash)
      }
    }
  }
  // ERC20 iff no required selector is missing.
  return [...erc20Sighashes].every((sighash) => seen.has(sighash))
}
/**
 * Builds a contract instance for the Uniswap V3 factory, which lives at the
 * same address on every supported network.
 */
export const getUniswapV3Factory = (signerOrProvider: any): ethers.Contract => {
  const factory = new ethers.Contract(
    UNISWAP_V3_FACTORY_ADDRESS,
    UNISWAP_FACTORY_ABI,
    signerOrProvider
  )
  return factory
}
/**
 * Loads surgery configuration from environment variables (after applying any
 * .env file). Required values throw if unset; the slice indices default to
 * the full range of the dump (endIndex of 0 or unset means Infinity).
 */
export const loadConfigs = (): SurgeryConfigs => {
  dotenv.config()
  return {
    stateDumpFilePath: reqenv('REGEN__STATE_DUMP_FILE'),
    etherscanFilePath: reqenv('REGEN__ETHERSCAN_FILE'),
    genesisFilePath: reqenv('REGEN__GENESIS_FILE'),
    outputFilePath: reqenv('REGEN__OUTPUT_FILE'),
    l2ProviderUrl: reqenv('REGEN__L2_PROVIDER_URL'),
    ropstenProviderUrl: reqenv('REGEN__ROPSTEN_PROVIDER_URL'),
    ropstenPrivateKey: reqenv('REGEN__ROPSTEN_PRIVATE_KEY'),
    ethProviderUrl: reqenv('REGEN__ETH_PROVIDER_URL'),
    stateDumpHeight: parseInt(reqenv('REGEN__STATE_DUMP_HEIGHT'), 10),
    startIndex: parseInt(getenv('REGEN__START_INDEX', '0'), 10),
    endIndex: parseInt(getenv('REGEN__END_INDEX', '0'), 10) || Infinity,
  }
}
/**
 * Reads the state dump file into an object. Required because the dumps get quite large.
 * JavaScript throws an error when trying to load large JSON files (>512mb) directly via
 * fs.readFileSync. Need a streaming approach instead.
 *
 * @param dumppath Path to the state dump file.
 * @returns Parsed state dump object.
 */
export const readDumpFile = async (dumppath: string): Promise<StateDump> => {
  return new Promise<StateDump>((resolve, reject) => {
    const dump: StateDump = []
    const stream = byline(fs.createReadStream(dumppath, { encoding: 'utf8' }))
    // The first row of the dump is skipped — presumably a header rather than
    // an account entry (TODO: confirm against the dump format).
    let isFirstRow = true
    stream.on('data', (line: any) => {
      const account = JSON.parse(line)
      if (isFirstRow) {
        isFirstRow = false
      } else {
        // Drop the `key` field since it is not pushed into the dump.
        delete account.key
        dump.push(account)
      }
    })
    stream.on('end', () => {
      resolve(dump)
    })
    // Propagate stream errors; without this handler the promise would hang
    // forever on a failed or unreadable file.
    stream.on('error', (err: any) => {
      reject(err)
    })
  })
}
/**
 * Reads the etherscan dump file into an array of contract entries. Streams
 * the file through parseChunked rather than reading it all at once —
 * presumably because the file can exceed the size fs.readFileSync/JSON.parse
 * can handle (same constraint as readDumpFile above).
 *
 * @param etherscanpath Path to the etherscan dump file.
 * @returns Parsed array of etherscan contract entries.
 */
export const readEtherscanFile = async (
  etherscanpath: string
): Promise<EtherscanContract[]> => {
  return parseChunked(createReadStream(etherscanpath))
}
/**
 * Reads and parses a genesis file from disk.
 *
 * @param genesispath Path to the genesis JSON file.
 * @returns Parsed genesis file contents.
 */
export const readGenesisFile = async (
  genesispath: string
): Promise<GenesisFile> => {
  const raw = fs.readFileSync(genesispath, 'utf8')
  return JSON.parse(raw)
}
/**
 * Converts the genesis file's alloc section into a state dump, attaching each
 * allocation's address to its account entry.
 *
 * @param genesispath Path to the genesis JSON file.
 * @returns State dump derived from the genesis allocations.
 */
export const readGenesisStateDump = async (
  genesispath: string
): Promise<StateDump> => {
  const genesis = await readGenesisFile(genesispath)
  return Object.entries(genesis.alloc).map(([address, account]) => {
    return {
      address,
      ...account,
    }
  })
}
export const checkStateDump = (dump: StateDump) => {
for (const account of dump) {
assert.equal(
account.address.toLowerCase(),
account.address,
`unexpected upper case character in state dump address: ${account.address}`
)
assert.ok(
typeof account.nonce === 'number',
`nonce is not a number: ${account.nonce}`
)
if (account.codeHash) {
assert.equal(
account.codeHash.toLowerCase(),
account.codeHash,
`unexpected upper case character in state dump codeHash: ${account.codeHash}`
)
}
if (account.root) {
assert.equal(
account.root.toLowerCase(),
account.root,
`unexpected upper case character in state dump root: ${account.root}`
)
}
if (account.code) {
assert.equal(
account.code.toLowerCase(),
account.code,
`unexpected upper case character in state dump code: ${account.code}`
)
}
// All accounts other than precompiles should have a balance of zero.
if (
!account.address.startsWith('0x00000000000000000000000000000000000000')
) {
assert.equal(
account.balance,
'0',
`unexpected non-zero balance in state dump address: ${account.address}`
)
}
if (account.storage !== undefined) {
for (const [storageKey, storageVal] of Object.entries(account.storage)) {
assert.equal(
storageKey.toLowerCase(),
storageKey,
`unexpected upper case character in state dump storage key: ${storageKey}`
)
assert.equal(
storageVal.toLowerCase(),
storageVal,
`unexpected upper case character in state dump storage value: ${storageVal}`
)
}
}
}
}
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment