Commit bf4d0c33 authored by Mark Tyneway, committed by GitHub

Merge pull request #2071 from ethereum-optimism/develop

Develop -> Master PR
parents 6f8e4325 c59c3d38
---
'@eth-optimism/proxyd': minor
---
proxyd: Allow cached RPCs to be evicted by redis
---
'@eth-optimism/core-utils': patch
---
Improved docstrings for BCFG typings
---
'@eth-optimism/integration-tests': minor
---
Updates to work with a live network
---
'@eth-optimism/batch-submitter-service': patch
---
Adds confirmation depth awareness to txmgr
---
'@eth-optimism/l2geth': patch
---
Add a better error message for the case where the sequencer URL is not configured and user `eth_sendRawTransaction` requests are proxied to the sequencer while running as a verifier/replica
---
'@eth-optimism/proxyd': minor
---
Add caching for block-dependent RPCs
---
'@eth-optimism/proxyd': minor
---
proxyd: Cache block-dependent RPCs
---
'@eth-optimism/l2geth': patch
---
Fix nonce issue
---
'@eth-optimism/integration-tests': patch
---
Use hardhat-ethers for importing factories in integration tests
---
'@eth-optimism/l2geth': patch
---
Add reinitialize-by-url command, add dump chain state command
---
'@eth-optimism/core-utils': patch
---
Cleans up the internal file and folder structure for the typings exported by core-utils
---
'@eth-optimism/integration-tests': patch
---
Split OVMMulticall.sol into Multicall.sol & OVMContext.sol
---
'@eth-optimism/batch-submitter-service': minor
---
Add multi-tx support, clear pending txs on startup
---
'@eth-optimism/l2geth': patch
---
Fix blocknumber monotonicity logging bug
---
'@eth-optimism/proxyd': minor
---
Add integration tests and batching
...@@ -141,6 +141,32 @@ jobs: ...@@ -141,6 +141,32 @@ jobs:
kubectl rollout restart statefulset nightly-dtl --namespace nightly kubectl rollout restart statefulset nightly-dtl --namespace nightly
kubectl rollout restart deployment nightly-gas-oracle --namespace nightly kubectl rollout restart deployment nightly-gas-oracle --namespace nightly
kubectl rollout restart deployment edge-proxyd --namespace nightly kubectl rollout restart deployment edge-proxyd --namespace nightly
run-itests-nightly:
docker:
- image: cimg/base:2021.04
steps:
- setup_remote_docker:
version: 19.03.13
- run:
name: Run integration tests
command: |
docker run \
--env PRIVATE_KEY=$NIGHTLY_ITESTS_PRIVKEY \
--env L1_URL=https://nightly-l1.optimism-stacks.net \
--env L2_URL=https://nightly-l2.optimism-stacks.net \
--env ADDRESS_MANAGER=0x22D4E211ef8704f2ca2d6dfdB32125E2530ACE3e \
--env L2_CHAINID=69 \
--env MOCHA_BAIL=true \
--env MOCHA_TIMEOUT=300000 \
--env L1_GAS_PRICE=onchain \
--env L2_GAS_PRICE=onchain \
--env RUN_DEBUG_TRACE_TESTS=false \
--env RUN_REPLICA_TESTS=false \
--env RUN_STRESS_TESTS=false \
--env OVMCONTEXT_SPEC_NUM_TXS=1 \
--env DTL_ENQUEUE_CONFIRMATIONS=12 \
"$STACKMAN_REPO/integration-tests:nightly" \
yarn test:integration:live
notify: notify:
docker: docker:
- image: cimg/base:2021.04 - image: cimg/base:2021.04
...@@ -152,6 +178,18 @@ jobs: ...@@ -152,6 +178,18 @@ jobs:
workflows: workflows:
nightly-itests:
triggers:
- schedule:
cron: "0 1 * * * "
filters:
branches:
only:
- develop
jobs:
- run-itests-nightly:
context:
- optimism
nightly: nightly:
triggers: triggers:
- schedule: - schedule:
......
...@@ -101,7 +101,17 @@ module.exports = { ...@@ -101,7 +101,17 @@ module.exports = {
'id-match': 'off', 'id-match': 'off',
'import/no-extraneous-dependencies': ['error'], 'import/no-extraneous-dependencies': ['error'],
'import/no-internal-modules': 'off', 'import/no-internal-modules': 'off',
'import/order': 'off', 'import/order': [
"error",
{
groups: [
'builtin',
'external',
'internal',
],
'newlines-between': 'always',
},
],
indent: 'off', indent: 'off',
'jsdoc/check-alignment': 'error', 'jsdoc/check-alignment': 'error',
'jsdoc/check-indentation': 'error', 'jsdoc/check-indentation': 'error',
......
name: proxyd unit tests
on:
push:
branches:
- 'master'
- 'develop'
pull_request:
paths:
- 'go/proxyd/**'
workflow_dispatch:
defaults:
run:
working-directory: ./go/proxyd
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.15.x
- name: Checkout code
uses: actions/checkout@v2
- name: Build
run: make proxyd
- name: Lint
run: make lint
- name: Test
run: make test
...@@ -29,6 +29,7 @@ jobs: ...@@ -29,6 +29,7 @@ jobs:
rpc-proxy : ${{ steps.packages.outputs.rpc-proxy }} rpc-proxy : ${{ steps.packages.outputs.rpc-proxy }}
op-exporter : ${{ steps.packages.outputs.op-exporter }} op-exporter : ${{ steps.packages.outputs.op-exporter }}
l2geth-exporter : ${{ steps.packages.outputs.l2geth-exporter }} l2geth-exporter : ${{ steps.packages.outputs.l2geth-exporter }}
batch-submitter-service : ${{ steps.packages.outputs.batch-submitter-service }}
steps: steps:
- name: Check out source code - name: Check out source code
...@@ -506,3 +507,29 @@ jobs: ...@@ -506,3 +507,29 @@ jobs:
file: ./ops/docker/Dockerfile.rpc-proxy file: ./ops/docker/Dockerfile.rpc-proxy
push: true push: true
tags: ethereumoptimism/rpc-proxy:${{ needs.canary-publish.outputs.rpc-proxy }} tags: ethereumoptimism/rpc-proxy:${{ needs.canary-publish.outputs.rpc-proxy }}
batch-submitter-service:
name: Publish batch-submitter-service Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
if: needs.canary-publish.outputs.batch-submitter-service != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.batch-submitter-service
push: true
tags: ethereumoptimism/batch-submitter-service:${{ needs.canary-publish.outputs.batch-submitter-service }}
...@@ -25,6 +25,7 @@ jobs: ...@@ -25,6 +25,7 @@ jobs:
hardhat-node: ${{ steps.packages.outputs.hardhat-node }} hardhat-node: ${{ steps.packages.outputs.hardhat-node }}
op-exporter : ${{ steps.packages.outputs.op-exporter }} op-exporter : ${{ steps.packages.outputs.op-exporter }}
l2geth-exporter : ${{ steps.packages.outputs.l2geth-exporter }} l2geth-exporter : ${{ steps.packages.outputs.l2geth-exporter }}
batch-submitter-service : ${{ steps.packages.outputs.batch-submitter-service }}
steps: steps:
- name: Checkout Repo - name: Checkout Repo
...@@ -502,3 +503,29 @@ jobs: ...@@ -502,3 +503,29 @@ jobs:
push: true push: true
tags: ethereumoptimism/replica-healthcheck:${{ needs.builder.outputs.replica-healthcheck }},ethereumoptimism/replica-healthcheck:latest tags: ethereumoptimism/replica-healthcheck:${{ needs.builder.outputs.replica-healthcheck }},ethereumoptimism/replica-healthcheck:latest
build-args: BUILDER_TAG=${{ needs.builder.outputs.builder }} build-args: BUILDER_TAG=${{ needs.builder.outputs.builder }}
batch-submitter-service:
name: Publish batch-submitter-service Version ${{ needs.release.outputs.batch-submitter-service }}
needs: release
if: needs.release.outputs.batch-submitter-service != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.batch-submitter-service
push: true
tags: ethereumoptimism/batch-submitter-service:${{ needs.release.outputs.batch-submitter-service }},ethereumoptimism/batch-submitter-service:latest
...@@ -164,6 +164,7 @@ func NewBatchSubmitter(cfg Config, gitVersion string) (*BatchSubmitter, error) { ...@@ -164,6 +164,7 @@ func NewBatchSubmitter(cfg Config, gitVersion string) (*BatchSubmitter, error) {
GasRetryIncrement: utils.GasPriceFromGwei(cfg.GasRetryIncrement), GasRetryIncrement: utils.GasPriceFromGwei(cfg.GasRetryIncrement),
ResubmissionTimeout: cfg.ResubmissionTimeout, ResubmissionTimeout: cfg.ResubmissionTimeout,
ReceiptQueryInterval: time.Second, ReceiptQueryInterval: time.Second,
NumConfirmations: cfg.NumConfirmations,
} }
var batchTxService *Service var batchTxService *Service
......
...@@ -32,12 +32,13 @@ func init() { ...@@ -32,12 +32,13 @@ func init() {
} }
var ( var (
testPrivKey *ecdsa.PrivateKey testPrivKey *ecdsa.PrivateKey
testWalletAddr common.Address testWalletAddr common.Address
testChainID *big.Int // 1 testChainID = big.NewInt(1)
testNonce = uint64(2) testNonce = uint64(2)
testGasPrice *big.Int // 3 testGasPrice = big.NewInt(3)
testGasLimit = uint64(4) testGasLimit = uint64(4)
testBlockNumber = uint64(5)
) )
// TestCraftClearingTx asserts that CraftClearingTx produces the expected // TestCraftClearingTx asserts that CraftClearingTx produces the expected
...@@ -102,11 +103,20 @@ func TestSignClearingTxEstimateGasFail(t *testing.T) { ...@@ -102,11 +103,20 @@ func TestSignClearingTxEstimateGasFail(t *testing.T) {
} }
type clearPendingTxHarness struct { type clearPendingTxHarness struct {
l1Client drivers.L1Client l1Client *mock.L1Client
txMgr txmgr.TxManager txMgr txmgr.TxManager
} }
func newClearPendingTxHarness(l1ClientConfig mock.L1ClientConfig) *clearPendingTxHarness { func newClearPendingTxHarnessWithNumConfs(
l1ClientConfig mock.L1ClientConfig,
numConfirmations uint64,
) *clearPendingTxHarness {
if l1ClientConfig.BlockNumber == nil {
l1ClientConfig.BlockNumber = func(_ context.Context) (uint64, error) {
return testBlockNumber, nil
}
}
if l1ClientConfig.NonceAt == nil { if l1ClientConfig.NonceAt == nil {
l1ClientConfig.NonceAt = func(_ context.Context, _ common.Address, _ *big.Int) (uint64, error) { l1ClientConfig.NonceAt = func(_ context.Context, _ common.Address, _ *big.Int) (uint64, error) {
return testNonce, nil return testNonce, nil
...@@ -125,6 +135,7 @@ func newClearPendingTxHarness(l1ClientConfig mock.L1ClientConfig) *clearPendingT ...@@ -125,6 +135,7 @@ func newClearPendingTxHarness(l1ClientConfig mock.L1ClientConfig) *clearPendingT
GasRetryIncrement: utils.GasPriceFromGwei(5), GasRetryIncrement: utils.GasPriceFromGwei(5),
ResubmissionTimeout: time.Second, ResubmissionTimeout: time.Second,
ReceiptQueryInterval: 50 * time.Millisecond, ReceiptQueryInterval: 50 * time.Millisecond,
NumConfirmations: numConfirmations,
}, l1Client) }, l1Client)
return &clearPendingTxHarness{ return &clearPendingTxHarness{
...@@ -133,6 +144,10 @@ func newClearPendingTxHarness(l1ClientConfig mock.L1ClientConfig) *clearPendingT ...@@ -133,6 +144,10 @@ func newClearPendingTxHarness(l1ClientConfig mock.L1ClientConfig) *clearPendingT
} }
} }
func newClearPendingTxHarness(l1ClientConfig mock.L1ClientConfig) *clearPendingTxHarness {
return newClearPendingTxHarnessWithNumConfs(l1ClientConfig, 1)
}
// TestClearPendingTxClearingTxConfirms asserts the happy path where our
// clearing transaction confirms unobstructed.
func TestClearPendingTxClearingTxConfirms(t *testing.T) { func TestClearPendingTxClearingTxConfirms(t *testing.T) {
...@@ -142,7 +157,8 @@ func TestClearPendingTxClearingTxConfirms(t *testing.T) { ...@@ -142,7 +157,8 @@ func TestClearPendingTxClearingTxConfirms(t *testing.T) {
}, },
TransactionReceipt: func(_ context.Context, txHash common.Hash) (*types.Receipt, error) { TransactionReceipt: func(_ context.Context, txHash common.Hash) (*types.Receipt, error) {
return &types.Receipt{ return &types.Receipt{
TxHash: txHash, TxHash: txHash,
BlockNumber: big.NewInt(int64(testBlockNumber)),
}, nil }, nil
}, },
}) })
...@@ -190,3 +206,42 @@ func TestClearPendingTxTimeout(t *testing.T) { ...@@ -190,3 +206,42 @@ func TestClearPendingTxTimeout(t *testing.T) {
) )
require.Equal(t, txmgr.ErrPublishTimeout, err) require.Equal(t, txmgr.ErrPublishTimeout, err)
} }
// TestClearPendingTxMultipleConfs tests that we wait the appropriate number of
// confirmations for the clearing transaction to confirm.
func TestClearPendingTxMultipleConfs(t *testing.T) {
const numConfs = 2
// Instantly confirm transaction.
h := newClearPendingTxHarnessWithNumConfs(mock.L1ClientConfig{
SendTransaction: func(_ context.Context, _ *types.Transaction) error {
return nil
},
TransactionReceipt: func(_ context.Context, txHash common.Hash) (*types.Receipt, error) {
return &types.Receipt{
TxHash: txHash,
BlockNumber: big.NewInt(int64(testBlockNumber)),
}, nil
},
}, numConfs)
// The txmgr should timeout waiting for the txn to confirm.
err := drivers.ClearPendingTx(
"test", context.Background(), h.txMgr, h.l1Client, testWalletAddr,
testPrivKey, testChainID,
)
require.Equal(t, txmgr.ErrPublishTimeout, err)
// Now set the chain height to the earliest the transaction will be
// considered sufficiently confirmed.
h.l1Client.SetBlockNumberFunc(func(_ context.Context) (uint64, error) {
return testBlockNumber + numConfs - 1, nil
})
// Publishing should succeed.
err = drivers.ClearPendingTx(
"test", context.Background(), h.txMgr, h.l1Client, testWalletAddr,
testPrivKey, testChainID,
)
require.Nil(t, err)
}
...@@ -13,6 +13,9 @@ import ( ...@@ -13,6 +13,9 @@ import (
// L1ClientConfig houses the internal methods that are executed by the mock // L1ClientConfig houses the internal methods that are executed by the mock
// L1Client. Any members left as nil will panic on execution. // L1Client. Any members left as nil will panic on execution.
type L1ClientConfig struct { type L1ClientConfig struct {
// BlockNumber returns the most recent block number.
BlockNumber func(context.Context) (uint64, error)
// EstimateGas tries to estimate the gas needed to execute a specific // EstimateGas tries to estimate the gas needed to execute a specific
// transaction based on the current pending state of the backend blockchain. // transaction based on the current pending state of the backend blockchain.
// There is no guarantee that this is the true gas limit requirement as // There is no guarantee that this is the true gas limit requirement as
...@@ -50,6 +53,14 @@ func NewL1Client(cfg L1ClientConfig) *L1Client { ...@@ -50,6 +53,14 @@ func NewL1Client(cfg L1ClientConfig) *L1Client {
} }
} }
// BlockNumber returns the most recent block number.
func (c *L1Client) BlockNumber(ctx context.Context) (uint64, error) {
c.mu.RLock()
defer c.mu.RUnlock()
return c.cfg.BlockNumber(ctx)
}
// EstimateGas executes the mock EstimateGas method. // EstimateGas executes the mock EstimateGas method.
func (c *L1Client) EstimateGas(ctx context.Context, call ethereum.CallMsg) (uint64, error) { func (c *L1Client) EstimateGas(ctx context.Context, call ethereum.CallMsg) (uint64, error) {
c.mu.RLock() c.mu.RLock()
...@@ -82,6 +93,16 @@ func (c *L1Client) TransactionReceipt(ctx context.Context, txHash common.Hash) ( ...@@ -82,6 +93,16 @@ func (c *L1Client) TransactionReceipt(ctx context.Context, txHash common.Hash) (
return c.cfg.TransactionReceipt(ctx, txHash) return c.cfg.TransactionReceipt(ctx, txHash)
} }
// SetBlockNumberFunc overwrites the mock BlockNumber method.
func (c *L1Client) SetBlockNumberFunc(
f func(context.Context) (uint64, error)) {
c.mu.Lock()
defer c.mu.Unlock()
c.cfg.BlockNumber = f
}
// SetEstimateGasFunc overwrites the mock EstimateGas method.
func (c *L1Client) SetEstimateGasFunc( func (c *L1Client) SetEstimateGasFunc(
f func(context.Context, ethereum.CallMsg) (uint64, error)) { f func(context.Context, ethereum.CallMsg) (uint64, error)) {
......
...@@ -52,6 +52,10 @@ type Config struct { ...@@ -52,6 +52,10 @@ type Config struct {
// query the backend to check for confirmations after a tx at a // query the backend to check for confirmations after a tx at a
// specific gas price has been published. // specific gas price has been published.
ReceiptQueryInterval time.Duration ReceiptQueryInterval time.Duration
// NumConfirmations specifies how many blocks are needed to consider a
// transaction confirmed.
NumConfirmations uint64
} }
// TxManager is an interface that allows callers to reliably publish txs, // TxManager is an interface that allows callers to reliably publish txs,
...@@ -71,6 +75,9 @@ type TxManager interface { ...@@ -71,6 +75,9 @@ type TxManager interface {
// //
// NOTE: This is a subset of bind.DeployBackend. // NOTE: This is a subset of bind.DeployBackend.
type ReceiptSource interface { type ReceiptSource interface {
// BlockNumber returns the most recent block number.
BlockNumber(ctx context.Context) (uint64, error)
// TransactionReceipt queries the backend for a receipt associated with // TransactionReceipt queries the backend for a receipt associated with
// txHash. If lookup does not fail, but the transaction is not found, // txHash. If lookup does not fail, but the transaction is not found,
// nil should be returned for both values. // nil should be returned for both values.
...@@ -90,6 +97,10 @@ type SimpleTxManager struct { ...@@ -90,6 +97,10 @@ type SimpleTxManager struct {
func NewSimpleTxManager( func NewSimpleTxManager(
name string, cfg Config, backend ReceiptSource) *SimpleTxManager { name string, cfg Config, backend ReceiptSource) *SimpleTxManager {
if cfg.NumConfirmations == 0 {
panic("txmgr: NumConfirmations cannot be zero")
}
return &SimpleTxManager{ return &SimpleTxManager{
name: name, name: name,
cfg: cfg, cfg: cfg,
...@@ -148,6 +159,7 @@ func (m *SimpleTxManager) Send( ...@@ -148,6 +159,7 @@ func (m *SimpleTxManager) Send(
// back to the main event loop if found. // back to the main event loop if found.
receipt, err := WaitMined( receipt, err := WaitMined(
ctxc, m.backend, tx, m.cfg.ReceiptQueryInterval, ctxc, m.backend, tx, m.cfg.ReceiptQueryInterval,
m.cfg.NumConfirmations,
) )
if err != nil { if err != nil {
log.Debug(name+" send tx failed", "hash", txHash, log.Debug(name+" send tx failed", "hash", txHash,
...@@ -220,6 +232,7 @@ func WaitMined( ...@@ -220,6 +232,7 @@ func WaitMined(
backend ReceiptSource, backend ReceiptSource,
tx *types.Transaction, tx *types.Transaction,
queryInterval time.Duration, queryInterval time.Duration,
numConfirmations uint64,
) (*types.Receipt, error) { ) (*types.Receipt, error) {
queryTicker := time.NewTicker(queryInterval) queryTicker := time.NewTicker(queryInterval)
...@@ -229,14 +242,42 @@ func WaitMined( ...@@ -229,14 +242,42 @@ func WaitMined(
for { for {
receipt, err := backend.TransactionReceipt(ctx, txHash) receipt, err := backend.TransactionReceipt(ctx, txHash)
if receipt != nil { switch {
return receipt, nil case receipt != nil:
} txHeight := receipt.BlockNumber.Uint64()
tipHeight, err := backend.BlockNumber(ctx)
if err != nil {
log.Error("Unable to fetch block number", "err", err)
break
}
if err != nil { log.Trace("Transaction mined, checking confirmations",
"txHash", txHash, "txHeight", txHeight,
"tipHeight", tipHeight,
"numConfirmations", numConfirmations)
// The transaction is considered confirmed when
// txHeight+numConfirmations-1 <= tipHeight. Note that the -1 is
// needed to account for the fact that confirmations have an
// inherent off-by-one, i.e. when using 1 confirmation the
// transaction should be confirmed when txHeight is equal to
// tipHeight. The equation is rewritten in this form to avoid
// underflows.
if txHeight+numConfirmations <= tipHeight+1 {
log.Info("Transaction confirmed", "txHash", txHash)
return receipt, nil
}
// Safe to subtract since we know the LHS above is greater.
confsRemaining := (txHeight + numConfirmations) - (tipHeight + 1)
log.Info("Transaction not yet confirmed", "txHash", txHash,
"confsRemaining", confsRemaining)
case err != nil:
log.Trace("Receipt retrievel failed", "hash", txHash, log.Trace("Receipt retrievel failed", "hash", txHash,
"err", err) "err", err)
} else {
default:
log.Trace("Transaction not yet mined", "hash", txHash) log.Trace("Transaction not yet mined", "hash", txHash)
} }
......
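The confirmation check added to WaitMined above can be restated as a small standalone sketch. This is illustrative only and leans on nothing beyond the comment's own arithmetic; txConfirmed is a hypothetical helper, not part of the txmgr package.

package main

import "fmt"

// txConfirmed mirrors the rule described in the WaitMined comment: a transaction
// mined at txHeight counts as confirmed once txHeight+numConfirmations-1 <= tipHeight,
// rewritten as txHeight+numConfirmations <= tipHeight+1 to avoid uint64 underflow.
func txConfirmed(txHeight, tipHeight, numConfirmations uint64) bool {
	return txHeight+numConfirmations <= tipHeight+1
}

func main() {
	fmt.Println(txConfirmed(5, 5, 1)) // true: with 1 confirmation, the mining block itself suffices
	fmt.Println(txConfirmed(5, 5, 2)) // false: one more block must be built on top
	fmt.Println(txConfirmed(5, 6, 2)) // true: the tip is now one block past the mining height
}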
...@@ -95,13 +95,23 @@ func newTestHarnessWithConfig(cfg txmgr.Config) *testHarness { ...@@ -95,13 +95,23 @@ func newTestHarnessWithConfig(cfg txmgr.Config) *testHarness {
// newTestHarness initializes a testHarness with a default configuration that is
// suitable for most tests.
func newTestHarness() *testHarness { func newTestHarness() *testHarness {
return newTestHarnessWithConfig(txmgr.Config{ return newTestHarnessWithConfig(configWithNumConfs(1))
}
func configWithNumConfs(numConfirmations uint64) txmgr.Config {
return txmgr.Config{
MinGasPrice: new(big.Int).SetUint64(5), MinGasPrice: new(big.Int).SetUint64(5),
MaxGasPrice: new(big.Int).SetUint64(50), MaxGasPrice: new(big.Int).SetUint64(50),
GasRetryIncrement: new(big.Int).SetUint64(5), GasRetryIncrement: new(big.Int).SetUint64(5),
ResubmissionTimeout: time.Second, ResubmissionTimeout: time.Second,
ReceiptQueryInterval: 50 * time.Millisecond, ReceiptQueryInterval: 50 * time.Millisecond,
}) NumConfirmations: numConfirmations,
}
}
type minedTxInfo struct {
gasPrice *big.Int
blockNumber uint64
} }
// mockBackend implements txmgr.ReceiptSource that tracks mined transactions // mockBackend implements txmgr.ReceiptSource that tracks mined transactions
...@@ -109,25 +119,42 @@ func newTestHarness() *testHarness { ...@@ -109,25 +119,42 @@ func newTestHarness() *testHarness {
type mockBackend struct { type mockBackend struct {
mu sync.RWMutex mu sync.RWMutex
// txHashMinedWithGasPrice tracks the has of a mined transaction to its // blockHeight tracks the current height of the chain.
// gas price. blockHeight uint64
txHashMinedWithGasPrice map[common.Hash]*big.Int
// minedTxs maps the hash of a mined transaction to its details.
minedTxs map[common.Hash]minedTxInfo
} }
// newMockBackend initializes a new mockBackend. // newMockBackend initializes a new mockBackend.
func newMockBackend() *mockBackend { func newMockBackend() *mockBackend {
return &mockBackend{ return &mockBackend{
txHashMinedWithGasPrice: make(map[common.Hash]*big.Int), minedTxs: make(map[common.Hash]minedTxInfo),
} }
} }
// mine records a (txHash, gasPrice) as confirmed. Subsequent calls to // mine records a (txHash, gasPrice) as confirmed. Subsequent calls to
// TransactionReceipt with a matching txHash will result in a non-nil receipt. // TransactionReceipt with a matching txHash will result in a non-nil receipt.
func (b *mockBackend) mine(txHash common.Hash, gasPrice *big.Int) { // If a nil txHash is supplied this has the effect of mining an empty block.
func (b *mockBackend) mine(txHash *common.Hash, gasPrice *big.Int) {
b.mu.Lock() b.mu.Lock()
defer b.mu.Unlock() defer b.mu.Unlock()
b.txHashMinedWithGasPrice[txHash] = gasPrice b.blockHeight++
if txHash != nil {
b.minedTxs[*txHash] = minedTxInfo{
gasPrice: gasPrice,
blockNumber: b.blockHeight,
}
}
}
// BlockNumber returns the most recent block number.
func (b *mockBackend) BlockNumber(ctx context.Context) (uint64, error) {
b.mu.RLock()
defer b.mu.RUnlock()
return b.blockHeight, nil
} }
// TransactionReceipt queries the mockBackend for a mined txHash. If none is // TransactionReceipt queries the mockBackend for a mined txHash. If none is
...@@ -142,7 +169,7 @@ func (b *mockBackend) TransactionReceipt( ...@@ -142,7 +169,7 @@ func (b *mockBackend) TransactionReceipt(
b.mu.RLock() b.mu.RLock()
defer b.mu.RUnlock() defer b.mu.RUnlock()
gasPrice, ok := b.txHashMinedWithGasPrice[txHash] txInfo, ok := b.minedTxs[txHash]
if !ok { if !ok {
return nil, nil return nil, nil
} }
...@@ -150,8 +177,9 @@ func (b *mockBackend) TransactionReceipt( ...@@ -150,8 +177,9 @@ func (b *mockBackend) TransactionReceipt(
// Return the gas price for the transaction in the GasUsed field so that // Return the gas price for the transaction in the GasUsed field so that
// we can assert the proper tx confirmed in our tests. // we can assert the proper tx confirmed in our tests.
return &types.Receipt{ return &types.Receipt{
TxHash: txHash, TxHash: txHash,
GasUsed: gasPrice.Uint64(), GasUsed: txInfo.gasPrice.Uint64(),
BlockNumber: big.NewInt(int64(txInfo.blockNumber)),
}, nil }, nil
} }
...@@ -168,7 +196,8 @@ func TestTxMgrConfirmAtMinGasPrice(t *testing.T) { ...@@ -168,7 +196,8 @@ func TestTxMgrConfirmAtMinGasPrice(t *testing.T) {
tx := types.NewTx(&types.LegacyTx{ tx := types.NewTx(&types.LegacyTx{
GasPrice: gasPrice, GasPrice: gasPrice,
}) })
h.backend.mine(tx.Hash(), gasPrice) txHash := tx.Hash()
h.backend.mine(&txHash, gasPrice)
return tx, nil return tx, nil
} }
...@@ -220,7 +249,8 @@ func TestTxMgrConfirmsAtMaxGasPrice(t *testing.T) { ...@@ -220,7 +249,8 @@ func TestTxMgrConfirmsAtMaxGasPrice(t *testing.T) {
GasPrice: gasPrice, GasPrice: gasPrice,
}) })
if gasPrice.Cmp(h.cfg.MaxGasPrice) == 0 { if gasPrice.Cmp(h.cfg.MaxGasPrice) == 0 {
h.backend.mine(tx.Hash(), gasPrice) txHash := tx.Hash()
h.backend.mine(&txHash, gasPrice)
} }
return tx, nil return tx, nil
} }
...@@ -252,7 +282,8 @@ func TestTxMgrConfirmsAtMaxGasPriceDelayed(t *testing.T) { ...@@ -252,7 +282,8 @@ func TestTxMgrConfirmsAtMaxGasPriceDelayed(t *testing.T) {
// should still return an error beforehand. // should still return an error beforehand.
if gasPrice.Cmp(h.cfg.MaxGasPrice) == 0 { if gasPrice.Cmp(h.cfg.MaxGasPrice) == 0 {
time.AfterFunc(2*time.Second, func() { time.AfterFunc(2*time.Second, func() {
h.backend.mine(tx.Hash(), gasPrice) txHash := tx.Hash()
h.backend.mine(&txHash, gasPrice)
}) })
} }
return tx, nil return tx, nil
...@@ -308,7 +339,8 @@ func TestTxMgrOnlyOnePublicationSucceeds(t *testing.T) { ...@@ -308,7 +339,8 @@ func TestTxMgrOnlyOnePublicationSucceeds(t *testing.T) {
tx := types.NewTx(&types.LegacyTx{ tx := types.NewTx(&types.LegacyTx{
GasPrice: gasPrice, GasPrice: gasPrice,
}) })
h.backend.mine(tx.Hash(), gasPrice) txHash := tx.Hash()
h.backend.mine(&txHash, gasPrice)
return tx, nil return tx, nil
} }
...@@ -338,7 +370,8 @@ func TestTxMgrConfirmsMinGasPriceAfterBumping(t *testing.T) { ...@@ -338,7 +370,8 @@ func TestTxMgrConfirmsMinGasPriceAfterBumping(t *testing.T) {
// Delay mining the tx with the min gas price. // Delay mining the tx with the min gas price.
if gasPrice.Cmp(h.cfg.MinGasPrice) == 0 { if gasPrice.Cmp(h.cfg.MinGasPrice) == 0 {
time.AfterFunc(5*time.Second, func() { time.AfterFunc(5*time.Second, func() {
h.backend.mine(tx.Hash(), gasPrice) txHash := tx.Hash()
h.backend.mine(&txHash, gasPrice)
}) })
} }
return tx, nil return tx, nil
...@@ -361,10 +394,10 @@ func TestWaitMinedReturnsReceiptOnFirstSuccess(t *testing.T) { ...@@ -361,10 +394,10 @@ func TestWaitMinedReturnsReceiptOnFirstSuccess(t *testing.T) {
// Create a tx and mine it immediately using the default backend. // Create a tx and mine it immediately using the default backend.
tx := types.NewTx(&types.LegacyTx{}) tx := types.NewTx(&types.LegacyTx{})
txHash := tx.Hash() txHash := tx.Hash()
h.backend.mine(txHash, new(big.Int)) h.backend.mine(&txHash, new(big.Int))
ctx := context.Background() ctx := context.Background()
receipt, err := txmgr.WaitMined(ctx, h.backend, tx, 50*time.Millisecond) receipt, err := txmgr.WaitMined(ctx, h.backend, tx, 50*time.Millisecond, 1)
require.Nil(t, err) require.Nil(t, err)
require.NotNil(t, receipt) require.NotNil(t, receipt)
require.Equal(t, receipt.TxHash, txHash) require.Equal(t, receipt.TxHash, txHash)
...@@ -383,16 +416,73 @@ func TestWaitMinedCanBeCanceled(t *testing.T) { ...@@ -383,16 +416,73 @@ func TestWaitMinedCanBeCanceled(t *testing.T) {
// Create an unmined tx.
tx := types.NewTx(&types.LegacyTx{}) tx := types.NewTx(&types.LegacyTx{})
receipt, err := txmgr.WaitMined(ctx, h.backend, tx, 50*time.Millisecond) receipt, err := txmgr.WaitMined(ctx, h.backend, tx, 50*time.Millisecond, 1)
require.Equal(t, err, context.DeadlineExceeded) require.Equal(t, err, context.DeadlineExceeded)
require.Nil(t, receipt) require.Nil(t, receipt)
} }
// TestWaitMinedMultipleConfs asserts that WaitMined will properly wait for more
// than one confirmation.
func TestWaitMinedMultipleConfs(t *testing.T) {
t.Parallel()
const numConfs = 2
h := newTestHarnessWithConfig(configWithNumConfs(numConfs))
ctxt, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
// Create an unmined tx.
tx := types.NewTx(&types.LegacyTx{})
txHash := tx.Hash()
h.backend.mine(&txHash, new(big.Int))
receipt, err := txmgr.WaitMined(ctxt, h.backend, tx, 50*time.Millisecond, numConfs)
require.Equal(t, err, context.DeadlineExceeded)
require.Nil(t, receipt)
ctxt, cancel = context.WithTimeout(context.Background(), time.Second)
defer cancel()
// Mine an empty block, tx should now be confirmed.
h.backend.mine(nil, nil)
receipt, err = txmgr.WaitMined(ctxt, h.backend, tx, 50*time.Millisecond, numConfs)
require.Nil(t, err)
require.NotNil(t, receipt)
require.Equal(t, txHash, receipt.TxHash)
}
// TestManagerPanicOnZeroConfs ensures that the NewSimpleTxManager will panic
// when attempting to configure with NumConfirmations set to zero.
func TestManagerPanicOnZeroConfs(t *testing.T) {
t.Parallel()
defer func() {
if r := recover(); r == nil {
t.Fatal("NewSimpleTxManager should panic when using zero conf")
}
}()
_ = newTestHarnessWithConfig(configWithNumConfs(0))
}
// failingBackend implements txmgr.ReceiptSource, returning a failure on the // failingBackend implements txmgr.ReceiptSource, returning a failure on the
// first call but a success on the second call. This allows us to test that the // first call but a success on the second call. This allows us to test that the
// inner loop of WaitMined properly handles this case. // inner loop of WaitMined properly handles this case.
type failingBackend struct { type failingBackend struct {
returnSuccess bool returnSuccessBlockNumber bool
returnSuccessReceipt bool
}
// BlockNumber for the failingBackend returns errRpcFailure on the first
// invocation and a fixed block height on subsequent calls.
func (b *failingBackend) BlockNumber(ctx context.Context) (uint64, error) {
if !b.returnSuccessBlockNumber {
b.returnSuccessBlockNumber = true
return 0, errRpcFailure
}
return 1, nil
} }
// TransactionReceipt for the failingBackend returns errRpcFailure on the first // TransactionReceipt for the failingBackend returns errRpcFailure on the first
...@@ -400,13 +490,14 @@ type failingBackend struct { ...@@ -400,13 +490,14 @@ type failingBackend struct {
func (b *failingBackend) TransactionReceipt( func (b *failingBackend) TransactionReceipt(
ctx context.Context, txHash common.Hash) (*types.Receipt, error) { ctx context.Context, txHash common.Hash) (*types.Receipt, error) {
if !b.returnSuccess { if !b.returnSuccessReceipt {
b.returnSuccess = true b.returnSuccessReceipt = true
return nil, errRpcFailure return nil, errRpcFailure
} }
return &types.Receipt{ return &types.Receipt{
TxHash: txHash, TxHash: txHash,
BlockNumber: big.NewInt(1),
}, nil }, nil
} }
...@@ -424,7 +515,7 @@ func TestWaitMinedReturnsReceiptAfterFailure(t *testing.T) { ...@@ -424,7 +515,7 @@ func TestWaitMinedReturnsReceiptAfterFailure(t *testing.T) {
txHash := tx.Hash() txHash := tx.Hash()
ctx := context.Background() ctx := context.Background()
receipt, err := txmgr.WaitMined(ctx, &borkedBackend, tx, 50*time.Millisecond) receipt, err := txmgr.WaitMined(ctx, &borkedBackend, tx, 50*time.Millisecond, 1)
require.Nil(t, err) require.Nil(t, err)
require.NotNil(t, receipt) require.NotNil(t, receipt)
require.Equal(t, receipt.TxHash, txHash) require.Equal(t, receipt.TxHash, txHash)
......
...@@ -11,3 +11,11 @@ fmt: ...@@ -11,3 +11,11 @@ fmt:
go mod tidy go mod tidy
gofmt -w . gofmt -w .
.PHONY: fmt .PHONY: fmt
test:
go test -race -v ./...
.PHONY: test
lint:
go vet ./...
.PHONY: lint
\ No newline at end of file
...@@ -62,6 +62,10 @@ var ( ...@@ -62,6 +62,10 @@ var (
Message: "backend returned an invalid response", Message: "backend returned an invalid response",
HTTPErrorCode: 500, HTTPErrorCode: 500,
} }
ErrTooManyBatchRequests = &RPCErr{
Code: JSONRPCErrorInternal - 14,
Message: "too many RPC calls in batch request",
}
) )
func ErrInvalidRequest(msg string) *RPCErr { func ErrInvalidRequest(msg string) *RPCErr {
...@@ -631,7 +635,7 @@ func (w *WSProxier) close() { ...@@ -631,7 +635,7 @@ func (w *WSProxier) close() {
} }
func (w *WSProxier) prepareClientMsg(msg []byte) (*RPCReq, error) { func (w *WSProxier) prepareClientMsg(msg []byte) (*RPCReq, error) {
req, err := ParseRPCReq(bytes.NewReader(msg)) req, err := ParseRPCReq(msg)
if err != nil { if err != nil {
return nil, err return nil, err
} }
......
...@@ -2,7 +2,7 @@ package proxyd ...@@ -2,7 +2,7 @@ package proxyd
import ( import (
"context" "context"
"encoding/json" "time"
"github.com/go-redis/redis/v8" "github.com/go-redis/redis/v8"
"github.com/golang/snappy" "github.com/golang/snappy"
...@@ -14,10 +14,11 @@ type Cache interface { ...@@ -14,10 +14,11 @@ type Cache interface {
Put(ctx context.Context, key string, value string) error Put(ctx context.Context, key string, value string) error
} }
// assuming an average RPCRes size of 3 KB
const ( const (
memoryCacheLimit = 4096 // assuming an average RPCRes size of 3 KB
numBlockConfirmations = 50 memoryCacheLimit = 4096
// Set a large TTL to avoid expirations. However, a TTL must be set for volatile-lru to take effect.
redisTTL = 30 * 7 * 24 * time.Hour
) )
type cache struct { type cache struct {
...@@ -62,17 +63,50 @@ func (c *redisCache) Get(ctx context.Context, key string) (string, error) { ...@@ -62,17 +63,50 @@ func (c *redisCache) Get(ctx context.Context, key string) (string, error) {
if err == redis.Nil { if err == redis.Nil {
return "", nil return "", nil
} else if err != nil { } else if err != nil {
RecordRedisError("CacheGet")
return "", err return "", err
} }
return val, nil return val, nil
} }
func (c *redisCache) Put(ctx context.Context, key string, value string) error { func (c *redisCache) Put(ctx context.Context, key string, value string) error {
err := c.rdb.Set(ctx, key, value, 0).Err() err := c.rdb.SetEX(ctx, key, value, redisTTL).Err()
if err != nil {
RecordRedisError("CacheSet")
}
return err return err
} }
type cacheWithCompression struct {
cache Cache
}
func newCacheWithCompression(cache Cache) *cacheWithCompression {
return &cacheWithCompression{cache}
}
func (c *cacheWithCompression) Get(ctx context.Context, key string) (string, error) {
encodedVal, err := c.cache.Get(ctx, key)
if err != nil {
return "", err
}
if encodedVal == "" {
return "", nil
}
val, err := snappy.Decode(nil, []byte(encodedVal))
if err != nil {
return "", err
}
return string(val), nil
}
func (c *cacheWithCompression) Put(ctx context.Context, key string, value string) error {
encodedVal := snappy.Encode(nil, []byte(value))
return c.cache.Put(ctx, key, string(encodedVal))
}
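For context on how the pieces above fit together, here is a minimal, self-contained sketch of the same write path: snappy-compress the value and store it with a long TTL via SetEX, so a Redis instance configured with a volatile-lru maxmemory policy can evict entries under pressure. The address, key, and payload are made up; this is not the proxyd wiring itself.

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/go-redis/redis/v8"
	"github.com/golang/snappy"
)

func main() {
	ctx := context.Background()
	// Assumes a local Redis; in proxyd the client comes from the redis config section.
	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"})

	const ttl = 30 * 7 * 24 * time.Hour // mirrors the large redisTTL constant above

	// Compress the serialized RPC response before writing it.
	compressed := snappy.Encode(nil, []byte(`{"jsonrpc":"2.0","id":1,"result":"0x1"}`))
	if err := rdb.SetEX(ctx, "example:rpc:key", string(compressed), ttl).Err(); err != nil {
		fmt.Println("cache write failed:", err)
		return
	}

	// Read it back and decompress, as cacheWithCompression.Get does.
	stored, err := rdb.Get(ctx, "example:rpc:key").Result()
	if err != nil {
		fmt.Println("cache read failed:", err)
		return
	}
	decoded, err := snappy.Decode(nil, []byte(stored))
	if err != nil {
		fmt.Println("decompression failed:", err)
		return
	}
	fmt.Println(string(decoded))
}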
type GetLatestBlockNumFn func(ctx context.Context) (uint64, error) type GetLatestBlockNumFn func(ctx context.Context) (uint64, error)
type GetLatestGasPriceFn func(ctx context.Context) (uint64, error)
type RPCCache interface { type RPCCache interface {
GetRPC(ctx context.Context, req *RPCReq) (*RPCRes, error) GetRPC(ctx context.Context, req *RPCReq) (*RPCRes, error)
...@@ -80,19 +114,24 @@ type RPCCache interface { ...@@ -80,19 +114,24 @@ type RPCCache interface {
} }
type rpcCache struct { type rpcCache struct {
cache Cache cache Cache
getLatestBlockNumFn GetLatestBlockNumFn handlers map[string]RPCMethodHandler
handlers map[string]RPCMethodHandler
} }
func newRPCCache(cache Cache, getLatestBlockNumFn GetLatestBlockNumFn) RPCCache { func newRPCCache(cache Cache, getLatestBlockNumFn GetLatestBlockNumFn, getLatestGasPriceFn GetLatestGasPriceFn, numBlockConfirmations int) RPCCache {
handlers := map[string]RPCMethodHandler{ handlers := map[string]RPCMethodHandler{
"eth_chainId": &StaticRPCMethodHandler{"eth_chainId"}, "eth_chainId": &StaticMethodHandler{},
"net_version": &StaticRPCMethodHandler{"net_version"}, "net_version": &StaticMethodHandler{},
"eth_getBlockByNumber": &EthGetBlockByNumberMethod{getLatestBlockNumFn}, "eth_getBlockByNumber": &EthGetBlockByNumberMethodHandler{cache, getLatestBlockNumFn, numBlockConfirmations},
"eth_getBlockRange": &EthGetBlockRangeMethod{getLatestBlockNumFn}, "eth_getBlockRange": &EthGetBlockRangeMethodHandler{cache, getLatestBlockNumFn, numBlockConfirmations},
"eth_blockNumber": &EthBlockNumberMethodHandler{getLatestBlockNumFn},
"eth_gasPrice": &EthGasPriceMethodHandler{getLatestGasPriceFn},
"eth_call": &EthCallMethodHandler{cache, getLatestBlockNumFn, numBlockConfirmations},
}
return &rpcCache{
cache: cache,
handlers: handlers,
} }
return &rpcCache{cache: cache, getLatestBlockNumFn: getLatestBlockNumFn, handlers: handlers}
} }
func (c *rpcCache) GetRPC(ctx context.Context, req *RPCReq) (*RPCRes, error) { func (c *rpcCache) GetRPC(ctx context.Context, req *RPCReq) (*RPCRes, error) {
...@@ -100,34 +139,15 @@ func (c *rpcCache) GetRPC(ctx context.Context, req *RPCReq) (*RPCRes, error) { ...@@ -100,34 +139,15 @@ func (c *rpcCache) GetRPC(ctx context.Context, req *RPCReq) (*RPCRes, error) {
if handler == nil { if handler == nil {
return nil, nil return nil, nil
} }
cacheable, err := handler.IsCacheable(req) res, err := handler.GetRPCMethod(ctx, req)
if err != nil { if res != nil {
return nil, err if res == nil {
} RecordCacheMiss(req.Method)
if !cacheable { } else {
return nil, nil RecordCacheHit(req.Method)
} }
key := handler.CacheKey(req)
encodedVal, err := c.cache.Get(ctx, key)
if err != nil {
return nil, err
}
if encodedVal == "" {
return nil, nil
}
val, err := snappy.Decode(nil, []byte(encodedVal))
if err != nil {
return nil, err
}
res := new(RPCRes)
err = json.Unmarshal(val, res)
if err != nil {
return nil, err
} }
res.ID = req.ID return res, err
return res, nil
} }
func (c *rpcCache) PutRPC(ctx context.Context, req *RPCReq, res *RPCRes) error { func (c *rpcCache) PutRPC(ctx context.Context, req *RPCReq, res *RPCRes) error {
...@@ -135,23 +155,5 @@ func (c *rpcCache) PutRPC(ctx context.Context, req *RPCReq, res *RPCRes) error { ...@@ -135,23 +155,5 @@ func (c *rpcCache) PutRPC(ctx context.Context, req *RPCReq, res *RPCRes) error {
if handler == nil { if handler == nil {
return nil return nil
} }
cacheable, err := handler.IsCacheable(req) return handler.PutRPCMethod(ctx, req, res)
if err != nil {
return err
}
if !cacheable {
return nil
}
requiresConfirmations, err := handler.RequiresUnconfirmedBlocks(ctx, req)
if err != nil {
return err
}
if requiresConfirmations {
return nil
}
key := handler.CacheKey(req)
val := mustMarshalJSON(res)
encodedVal := snappy.Encode(nil, val)
return c.cache.Put(ctx, key, string(encodedVal))
} }
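The method handlers registered in newRPCCache above are not shown in this hunk, but the tests below exercise the confirmation-depth rule they implement: block-tagged responses are only cached once the requested block is at least numBlockConfirmations behind the chain head. A standalone sketch of that decision follows, with isCacheableBlock as a hypothetical helper rather than a proxyd API.

package main

import "fmt"

// isCacheableBlock reports whether a block at blockNum is deep enough below the
// current head to be treated as final for caching purposes.
func isCacheableBlock(blockNum, head, numBlockConfirmations uint64) bool {
	return head >= numBlockConfirmations && blockNum <= head-numBlockConfirmations
}

func main() {
	const numBlockConfirmations = 10
	fmt.Println(isCacheableBlock(0xa, 100, numBlockConfirmations)) // true: far behind head, safe to cache
	fmt.Println(isCacheableBlock(0xa, 0xc, numBlockConfirmations)) // false: still inside the confirmation window
}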
...@@ -9,14 +9,16 @@ import ( ...@@ -9,14 +9,16 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestRPCCacheWhitelist(t *testing.T) { const numBlockConfirmations = 10
func TestRPCCacheImmutableRPCs(t *testing.T) {
const blockHead = math.MaxUint64 const blockHead = math.MaxUint64
ctx := context.Background() ctx := context.Background()
fn := func(ctx context.Context) (uint64, error) { getBlockNum := func(ctx context.Context) (uint64, error) {
return blockHead, nil return blockHead, nil
} }
cache := newRPCCache(newMemoryCache(), fn) cache := newRPCCache(newMemoryCache(), getBlockNum, nil, numBlockConfirmations)
ID := []byte(strconv.Itoa(1)) ID := []byte(strconv.Itoa(1))
rpcs := []struct { rpcs := []struct {
...@@ -120,6 +122,82 @@ func TestRPCCacheWhitelist(t *testing.T) { ...@@ -120,6 +122,82 @@ func TestRPCCacheWhitelist(t *testing.T) {
} }
} }
func TestRPCCacheBlockNumber(t *testing.T) {
var blockHead uint64 = 0x1000
var gasPrice uint64 = 0x100
ctx := context.Background()
ID := []byte(strconv.Itoa(1))
getGasPrice := func(ctx context.Context) (uint64, error) {
return gasPrice, nil
}
getBlockNum := func(ctx context.Context) (uint64, error) {
return blockHead, nil
}
cache := newRPCCache(newMemoryCache(), getBlockNum, getGasPrice, numBlockConfirmations)
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_blockNumber",
ID: ID,
}
res := &RPCRes{
JSONRPC: "2.0",
Result: `0x1000`,
ID: ID,
}
err := cache.PutRPC(ctx, req, res)
require.NoError(t, err)
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, res, cachedRes)
blockHead = 0x1001
cachedRes, err = cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, &RPCRes{JSONRPC: "2.0", Result: `0x1001`, ID: ID}, cachedRes)
}
func TestRPCCacheGasPrice(t *testing.T) {
var blockHead uint64 = 0x1000
var gasPrice uint64 = 0x100
ctx := context.Background()
ID := []byte(strconv.Itoa(1))
getGasPrice := func(ctx context.Context) (uint64, error) {
return gasPrice, nil
}
getBlockNum := func(ctx context.Context) (uint64, error) {
return blockHead, nil
}
cache := newRPCCache(newMemoryCache(), getBlockNum, getGasPrice, numBlockConfirmations)
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_gasPrice",
ID: ID,
}
res := &RPCRes{
JSONRPC: "2.0",
Result: `0x100`,
ID: ID,
}
err := cache.PutRPC(ctx, req, res)
require.NoError(t, err)
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, res, cachedRes)
gasPrice = 0x101
cachedRes, err = cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, &RPCRes{JSONRPC: "2.0", Result: `0x101`, ID: ID}, cachedRes)
}
func TestRPCCacheUnsupportedMethod(t *testing.T) { func TestRPCCacheUnsupportedMethod(t *testing.T) {
const blockHead = math.MaxUint64 const blockHead = math.MaxUint64
ctx := context.Background() ctx := context.Background()
...@@ -127,17 +205,17 @@ func TestRPCCacheUnsupportedMethod(t *testing.T) { ...@@ -127,17 +205,17 @@ func TestRPCCacheUnsupportedMethod(t *testing.T) {
fn := func(ctx context.Context) (uint64, error) { fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil return blockHead, nil
} }
cache := newRPCCache(newMemoryCache(), fn) cache := newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations)
ID := []byte(strconv.Itoa(1)) ID := []byte(strconv.Itoa(1))
req := &RPCReq{ req := &RPCReq{
JSONRPC: "2.0", JSONRPC: "2.0",
Method: "eth_blockNumber", Method: "eth_syncing",
ID: ID, ID: ID,
} }
res := &RPCRes{ res := &RPCRes{
JSONRPC: "2.0", JSONRPC: "2.0",
Result: `0x1000`, Result: false,
ID: ID, ID: ID,
} }
...@@ -149,6 +227,62 @@ func TestRPCCacheUnsupportedMethod(t *testing.T) { ...@@ -149,6 +227,62 @@ func TestRPCCacheUnsupportedMethod(t *testing.T) {
require.Nil(t, cachedRes) require.Nil(t, cachedRes)
} }
func TestRPCCacheEthGetBlockByNumber(t *testing.T) {
ctx := context.Background()
var blockHead uint64
fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil
}
makeCache := func() RPCCache { return newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations) }
ID := []byte(strconv.Itoa(1))
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_getBlockByNumber",
Params: []byte(`["0xa", false]`),
ID: ID,
}
res := &RPCRes{
JSONRPC: "2.0",
Result: `{"difficulty": "0x1", "number": "0x1"}`,
ID: ID,
}
req2 := &RPCReq{
JSONRPC: "2.0",
Method: "eth_getBlockByNumber",
Params: []byte(`["0xb", false]`),
ID: ID,
}
res2 := &RPCRes{
JSONRPC: "2.0",
Result: `{"difficulty": "0x2", "number": "0x2"}`,
ID: ID,
}
t.Run("set multiple finalized blocks", func(t *testing.T) {
blockHead = 100
cache := makeCache()
require.NoError(t, cache.PutRPC(ctx, req, res))
require.NoError(t, cache.PutRPC(ctx, req2, res2))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, res, cachedRes)
cachedRes, err = cache.GetRPC(ctx, req2)
require.NoError(t, err)
require.Equal(t, res2, cachedRes)
})
t.Run("unconfirmed block", func(t *testing.T) {
blockHead = 0xc
cache := makeCache()
require.NoError(t, cache.PutRPC(ctx, req, res))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Nil(t, cachedRes)
})
}
func TestRPCCacheEthGetBlockByNumberForRecentBlocks(t *testing.T) { func TestRPCCacheEthGetBlockByNumberForRecentBlocks(t *testing.T) {
ctx := context.Background() ctx := context.Background()
...@@ -156,7 +290,7 @@ func TestRPCCacheEthGetBlockByNumberForRecentBlocks(t *testing.T) { ...@@ -156,7 +290,7 @@ func TestRPCCacheEthGetBlockByNumberForRecentBlocks(t *testing.T) {
fn := func(ctx context.Context) (uint64, error) { fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil return blockHead, nil
} }
cache := newRPCCache(newMemoryCache(), fn) cache := newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations)
ID := []byte(strconv.Itoa(1)) ID := []byte(strconv.Itoa(1))
rpcs := []struct { rpcs := []struct {
...@@ -164,20 +298,6 @@ func TestRPCCacheEthGetBlockByNumberForRecentBlocks(t *testing.T) { ...@@ -164,20 +298,6 @@ func TestRPCCacheEthGetBlockByNumberForRecentBlocks(t *testing.T) {
res *RPCRes res *RPCRes
name string name string
}{ }{
{
req: &RPCReq{
JSONRPC: "2.0",
Method: "eth_getBlockByNumber",
Params: []byte(`["0x1", false]`),
ID: ID,
},
res: &RPCRes{
JSONRPC: "2.0",
Result: `{"difficulty": "0x1", "number": "0x1"}`,
ID: ID,
},
name: "recent block num",
},
{ {
req: &RPCReq{ req: &RPCReq{
JSONRPC: "2.0", JSONRPC: "2.0",
...@@ -227,7 +347,7 @@ func TestRPCCacheEthGetBlockByNumberInvalidRequest(t *testing.T) { ...@@ -227,7 +347,7 @@ func TestRPCCacheEthGetBlockByNumberInvalidRequest(t *testing.T) {
fn := func(ctx context.Context) (uint64, error) { fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil return blockHead, nil
} }
cache := newRPCCache(newMemoryCache(), fn) cache := newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations)
ID := []byte(strconv.Itoa(1)) ID := []byte(strconv.Itoa(1))
req := &RPCReq{ req := &RPCReq{
...@@ -250,6 +370,56 @@ func TestRPCCacheEthGetBlockByNumberInvalidRequest(t *testing.T) { ...@@ -250,6 +370,56 @@ func TestRPCCacheEthGetBlockByNumberInvalidRequest(t *testing.T) {
require.Nil(t, cachedRes) require.Nil(t, cachedRes)
} }
func TestRPCCacheEthGetBlockRange(t *testing.T) {
ctx := context.Background()
var blockHead uint64
fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil
}
makeCache := func() RPCCache { return newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations) }
ID := []byte(strconv.Itoa(1))
t.Run("finalized block", func(t *testing.T) {
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_getBlockRange",
Params: []byte(`["0x1", "0x10", false]`),
ID: ID,
}
res := &RPCRes{
JSONRPC: "2.0",
Result: `[{"number": "0x1"}, {"number": "0x10"}]`,
ID: ID,
}
blockHead = 0x1000
cache := makeCache()
require.NoError(t, cache.PutRPC(ctx, req, res))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, res, cachedRes)
})
t.Run("unconfirmed block", func(t *testing.T) {
cache := makeCache()
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_getBlockRange",
Params: []byte(`["0x1", "0x1000", false]`),
ID: ID,
}
res := &RPCRes{
JSONRPC: "2.0",
Result: `[{"number": "0x1"}, {"number": "0x2"}]`,
ID: ID,
}
require.NoError(t, cache.PutRPC(ctx, req, res))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Nil(t, cachedRes)
})
}
func TestRPCCacheEthGetBlockRangeForRecentBlocks(t *testing.T) { func TestRPCCacheEthGetBlockRangeForRecentBlocks(t *testing.T) {
ctx := context.Background() ctx := context.Background()
...@@ -257,7 +427,7 @@ func TestRPCCacheEthGetBlockRangeForRecentBlocks(t *testing.T) { ...@@ -257,7 +427,7 @@ func TestRPCCacheEthGetBlockRangeForRecentBlocks(t *testing.T) {
fn := func(ctx context.Context) (uint64, error) { fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil return blockHead, nil
} }
cache := newRPCCache(newMemoryCache(), fn) cache := newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations)
ID := []byte(strconv.Itoa(1)) ID := []byte(strconv.Itoa(1))
rpcs := []struct { rpcs := []struct {
...@@ -265,20 +435,6 @@ func TestRPCCacheEthGetBlockRangeForRecentBlocks(t *testing.T) { ...@@ -265,20 +435,6 @@ func TestRPCCacheEthGetBlockRangeForRecentBlocks(t *testing.T) {
res *RPCRes res *RPCRes
name string name string
}{ }{
{
req: &RPCReq{
JSONRPC: "2.0",
Method: "eth_getBlockRange",
Params: []byte(`["0x1", "0x1000", false]`),
ID: ID,
},
res: &RPCRes{
JSONRPC: "2.0",
Result: `[{"number": "0x1"}, {"number": "0x2"}]`,
ID: ID,
},
name: "recent block num",
},
{ {
req: &RPCReq{ req: &RPCReq{
JSONRPC: "2.0", JSONRPC: "2.0",
...@@ -342,7 +498,7 @@ func TestRPCCacheEthGetBlockRangeInvalidRequest(t *testing.T) { ...@@ -342,7 +498,7 @@ func TestRPCCacheEthGetBlockRangeInvalidRequest(t *testing.T) {
fn := func(ctx context.Context) (uint64, error) { fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil return blockHead, nil
} }
cache := newRPCCache(newMemoryCache(), fn) cache := newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations)
ID := []byte(strconv.Itoa(1)) ID := []byte(strconv.Itoa(1))
rpcs := []struct { rpcs := []struct {
...@@ -391,3 +547,76 @@ func TestRPCCacheEthGetBlockRangeInvalidRequest(t *testing.T) { ...@@ -391,3 +547,76 @@ func TestRPCCacheEthGetBlockRangeInvalidRequest(t *testing.T) {
}) })
} }
} }
func TestRPCCacheEthCall(t *testing.T) {
ctx := context.Background()
var blockHead uint64
fn := func(ctx context.Context) (uint64, error) {
return blockHead, nil
}
makeCache := func() RPCCache { return newRPCCache(newMemoryCache(), fn, nil, numBlockConfirmations) }
ID := []byte(strconv.Itoa(1))
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_call",
Params: []byte(`[{"to": "0xDEADBEEF", "data": "0x1"}, "0x10"]`),
ID: ID,
}
res := &RPCRes{
JSONRPC: "2.0",
Result: `0x0`,
ID: ID,
}
t.Run("finalized block", func(t *testing.T) {
blockHead = 0x100
cache := makeCache()
err := cache.PutRPC(ctx, req, res)
require.NoError(t, err)
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Equal(t, res, cachedRes)
})
t.Run("unconfirmed block", func(t *testing.T) {
blockHead = 0x10
cache := makeCache()
require.NoError(t, cache.PutRPC(ctx, req, res))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Nil(t, cachedRes)
})
t.Run("latest block", func(t *testing.T) {
blockHead = 0x100
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_call",
Params: []byte(`[{"to": "0xDEADBEEF", "data": "0x1"}, "latest"]`),
ID: ID,
}
cache := makeCache()
require.NoError(t, cache.PutRPC(ctx, req, res))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Nil(t, cachedRes)
})
t.Run("pending block", func(t *testing.T) {
blockHead = 0x100
req := &RPCReq{
JSONRPC: "2.0",
Method: "eth_call",
Params: []byte(`[{"to": "0xDEADBEEF", "data": "0x1"}, "pending"]`),
ID: ID,
}
cache := makeCache()
require.NoError(t, cache.PutRPC(ctx, req, res))
cachedRes, err := cache.GetRPC(ctx, req)
require.NoError(t, err)
require.Nil(t, cachedRes)
})
}
...@@ -2,6 +2,8 @@ package main ...@@ -2,6 +2,8 @@ package main
import ( import (
"os" "os"
"os/signal"
"syscall"
"github.com/BurntSushi/toml" "github.com/BurntSushi/toml"
"github.com/ethereum-optimism/optimism/go/proxyd" "github.com/ethereum-optimism/optimism/go/proxyd"
...@@ -35,7 +37,14 @@ func main() { ...@@ -35,7 +37,14 @@ func main() {
log.Crit("error reading config file", "err", err) log.Crit("error reading config file", "err", err)
} }
if err := proxyd.Start(config); err != nil { shutdown, err := proxyd.Start(config)
if err != nil {
log.Crit("error starting proxyd", "err", err) log.Crit("error starting proxyd", "err", err)
} }
sig := make(chan os.Signal, 1)
signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM)
recvSig := <-sig
log.Info("caught signal, shutting down", "signal", recvSig)
shutdown()
} }
...@@ -15,8 +15,9 @@ type ServerConfig struct { ...@@ -15,8 +15,9 @@ type ServerConfig struct {
} }
type CacheConfig struct { type CacheConfig struct {
Enabled bool `toml:"enabled"` Enabled bool `toml:"enabled"`
BlockSyncRPCURL string `toml:"block_sync_rpc_url"` BlockSyncRPCURL string `toml:"block_sync_rpc_url"`
NumBlockConfirmations int `toml:"num_block_confirmations"`
} }
type RedisConfig struct { type RedisConfig struct {
...@@ -61,11 +62,11 @@ type MethodMappingsConfig map[string]string ...@@ -61,11 +62,11 @@ type MethodMappingsConfig map[string]string
type Config struct { type Config struct {
WSBackendGroup string `toml:"ws_backend_group"` WSBackendGroup string `toml:"ws_backend_group"`
Server *ServerConfig `toml:"server"` Server ServerConfig `toml:"server"`
Cache *CacheConfig `toml:"cache"` Cache CacheConfig `toml:"cache"`
Redis *RedisConfig `toml:"redis"` Redis RedisConfig `toml:"redis"`
Metrics *MetricsConfig `toml:"metrics"` Metrics MetricsConfig `toml:"metrics"`
BackendOptions *BackendOptions `toml:"backend"` BackendOptions BackendOptions `toml:"backend"`
Backends BackendsConfig `toml:"backends"` Backends BackendsConfig `toml:"backends"`
Authentication map[string]string `toml:"authentication"` Authentication map[string]string `toml:"authentication"`
BackendGroups BackendGroupsConfig `toml:"backend_groups"` BackendGroups BackendGroupsConfig `toml:"backend_groups"`
......
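Since the config sections above switch from pointer to value types, a missing section now decodes to a zero value instead of a nil pointer. A minimal sketch of that behavior using BurntSushi/toml; the trimmed struct below is a stand-in for the real proxyd Config, not the actual type.

package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

type cacheConfig struct {
	Enabled               bool   `toml:"enabled"`
	BlockSyncRPCURL       string `toml:"block_sync_rpc_url"`
	NumBlockConfirmations int    `toml:"num_block_confirmations"`
}

type config struct {
	Cache cacheConfig `toml:"cache"`
}

func main() {
	raw := `
[cache]
enabled = true
block_sync_rpc_url = "http://localhost:8545"
num_block_confirmations = 10
`
	var cfg config
	if _, err := toml.Decode(raw, &cfg); err != nil {
		fmt.Println("failed to parse config:", err)
		return
	}
	// A value field is always usable; an omitted [cache] section would simply
	// leave cfg.Cache at its zero value rather than a nil pointer to check.
	fmt.Printf("%+v\n", cfg.Cache)
}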
...@@ -4,13 +4,18 @@ go 1.16 ...@@ -4,13 +4,18 @@ go 1.16
require ( require (
github.com/BurntSushi/toml v0.4.1 github.com/BurntSushi/toml v0.4.1
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect
github.com/alicebob/miniredis v2.5.0+incompatible
github.com/ethereum/go-ethereum v1.10.11 github.com/ethereum/go-ethereum v1.10.11
github.com/go-redis/redis/v8 v8.11.4 github.com/go-redis/redis/v8 v8.11.4
github.com/golang/snappy v0.0.4 github.com/golang/snappy v0.0.4
github.com/gomodule/redigo v1.8.8 // indirect
github.com/gorilla/mux v1.8.0 github.com/gorilla/mux v1.8.0
github.com/gorilla/websocket v1.4.2 github.com/gorilla/websocket v1.4.2
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d
github.com/prometheus/client_golang v1.11.0 github.com/prometheus/client_golang v1.11.0
github.com/rs/cors v1.8.0 github.com/rs/cors v1.8.0
github.com/stretchr/testify v1.7.0 github.com/stretchr/testify v1.7.0
github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
) )
...@@ -48,6 +48,10 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy ...@@ -48,6 +48,10 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk=
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
github.com/alicebob/miniredis v2.5.0+incompatible h1:yBHoLpsyjupjz3NL3MhKMVkR41j82Yjf3KFv7ApYzUI=
github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+DxYx+6BMjkBbe5ONFIF1MXffk=
github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM= github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db/go.mod h1:VTxUBvSJ3s3eHAg65PNgrsn5BtqCRPdmyXh6rAfdxN0= github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db/go.mod h1:VTxUBvSJ3s3eHAg65PNgrsn5BtqCRPdmyXh6rAfdxN0=
...@@ -185,6 +189,8 @@ github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW ...@@ -185,6 +189,8 @@ github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEW
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y= github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
github.com/gomodule/redigo v1.8.8 h1:f6cXq6RRfiyrOJEV7p3JhLDlmawGBVBBP1MggY8Mo4E=
github.com/gomodule/redigo v1.8.8/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
...@@ -427,6 +433,8 @@ github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+ ...@@ -427,6 +433,8 @@ github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+
github.com/willf/bitset v1.1.3/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/willf/bitset v1.1.3/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 h1:k/gmLsJDWwWqbLCur2yWnJzwQEKRcAHXo6seXGuSwWw=
github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
...@@ -520,6 +528,7 @@ golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5h ...@@ -520,6 +528,7 @@ golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
...@@ -679,8 +688,9 @@ gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= ...@@ -679,8 +688,9 @@ gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
......
package integration_tests
import (
"bytes"
"fmt"
"github.com/alicebob/miniredis"
"github.com/ethereum-optimism/optimism/go/proxyd"
"github.com/stretchr/testify/require"
"os"
"testing"
"time"
)
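// TestCaching spins up proxyd against a miniredis instance and checks that
// immutable RPC responses are served from the cache: each case issues the same
// request twice and asserts how many calls actually reach the mock backend.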
func TestCaching(t *testing.T) {
redis, err := miniredis.Run()
require.NoError(t, err)
defer redis.Close()
hdlr := NewRPCResponseHandler(map[string]string{
"eth_chainId": "0x420",
"net_version": "0x1234",
"eth_blockNumber": "0x64",
"eth_getBlockByNumber": "dummy_block",
"eth_call": "dummy_call",
})
backend := NewMockBackend(hdlr)
defer backend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", backend.URL()))
require.NoError(t, os.Setenv("REDIS_URL", fmt.Sprintf("redis://127.0.0.1:%s", redis.Port())))
config := ReadConfig("caching")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
// allow time for the block number fetcher to fire
time.Sleep(1500 * time.Millisecond)
tests := []struct {
method string
params []interface{}
response string
backendCalls int
}{
{
"eth_chainId",
nil,
"{\"jsonrpc\": \"2.0\", \"result\": \"0x420\", \"id\": 999}",
1,
},
{
"net_version",
nil,
"{\"jsonrpc\": \"2.0\", \"result\": \"0x1234\", \"id\": 999}",
1,
},
{
"eth_getBlockByNumber",
[]interface{}{
"0x1",
true,
},
"{\"jsonrpc\": \"2.0\", \"result\": \"dummy_block\", \"id\": 999}",
1,
},
{
"eth_call",
[]interface{}{
struct {
To string `json:"to"`
}{
"0x1234",
},
"0x60",
},
"{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"dummy_call\"}",
1,
},
{
"eth_blockNumber",
nil,
"{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"0x64\"}",
0,
},
{
"eth_call",
[]interface{}{
struct {
To string `json:"to"`
}{
"0x1234",
},
"latest",
},
"{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"dummy_call\"}",
2,
},
{
"eth_call",
[]interface{}{
struct {
To string `json:"to"`
}{
"0x1234",
},
"pending",
},
"{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"dummy_call\"}",
2,
},
}
for _, tt := range tests {
t.Run(tt.method, func(t *testing.T) {
resRaw, _, err := client.SendRPC(tt.method, tt.params)
require.NoError(t, err)
resCache, _, err := client.SendRPC(tt.method, tt.params)
require.NoError(t, err)
RequireEqualJSON(t, []byte(tt.response), resCache)
RequireEqualJSON(t, resRaw, resCache)
require.Equal(t, tt.backendCalls, countRequests(backend, tt.method))
backend.Reset()
})
}
hdlr.SetResponse("eth_blockNumber", "0x100")
time.Sleep(1500 * time.Millisecond)
resRaw, _, err := client.SendRPC("eth_blockNumber", nil)
require.NoError(t, err)
RequireEqualJSON(t, []byte("{\"id\":999,\"jsonrpc\":\"2.0\",\"result\":\"0x100\"}"), resRaw)
}
func countRequests(backend *MockBackend, name string) int {
var count int
for _, req := range backend.Requests() {
if bytes.Contains(req.Body, []byte(name)) {
count++
}
}
return count
}
package integration_tests
import (
"fmt"
"github.com/ethereum-optimism/optimism/go/proxyd"
"github.com/stretchr/testify/require"
"net/http"
"os"
"sync/atomic"
"testing"
"time"
)
const (
goodResponse = `{"jsonrpc": "2.0", "result": "hello", "id": 999}`
noBackendsResponse = `{"error":{"code":-32011,"message":"no backends available for method"},"id":999,"jsonrpc":"2.0"}`
)
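// TestFailover routes requests through a backend group of ["bad", "good"] and
// verifies that malformed responses, error status codes, and timeouts from the
// bad backend all fall through to the good one.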
func TestFailover(t *testing.T) {
goodBackend := NewMockBackend(SingleResponseHandler(200, goodResponse))
defer goodBackend.Close()
badBackend := NewMockBackend(nil)
defer badBackend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", goodBackend.URL()))
require.NoError(t, os.Setenv("BAD_BACKEND_RPC_URL", badBackend.URL()))
config := ReadConfig("failover")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
tests := []struct {
name string
handler http.Handler
}{
{
"backend responds 200 with non-JSON response",
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(200)
w.Write([]byte("this data is not JSON!"))
}),
},
{
"backend responds with no body",
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(200)
}),
},
}
codes := []int{
300,
301,
302,
401,
403,
429,
500,
503,
}
for _, code := range codes {
tests = append(tests, struct {
name string
handler http.Handler
}{
fmt.Sprintf("backend %d", code),
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(code)
}),
})
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
badBackend.SetHandler(tt.handler)
res, statusCode, err := client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(goodResponse), res)
require.Equal(t, 1, len(badBackend.Requests()))
require.Equal(t, 1, len(goodBackend.Requests()))
badBackend.Reset()
goodBackend.Reset()
})
}
t.Run("backend times out and falls back to another", func(t *testing.T) {
badBackend.SetHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
time.Sleep(2 * time.Second)
w.Write([]byte("{}"))
}))
res, statusCode, err := client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(goodResponse), res)
require.Equal(t, 1, len(badBackend.Requests()))
require.Equal(t, 1, len(goodBackend.Requests()))
goodBackend.Reset()
badBackend.Reset()
})
t.Run("works with a batch request", func(t *testing.T) {
badBackend.SetHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(500)
}))
res, statusCode, err := client.SendBatchRPC(
NewRPCReq("1", "eth_chainId", nil),
NewRPCReq("1", "eth_chainId", nil),
)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(asArray(goodResponse, goodResponse)), res)
require.Equal(t, 2, len(badBackend.Requests()))
require.Equal(t, 2, len(goodBackend.Requests()))
})
}
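// TestRetries checks that proxyd retries a failing backend up to max_retries
// and returns a 503 "no backends available" response once retries are exhausted.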
func TestRetries(t *testing.T) {
backend := NewMockBackend(SingleResponseHandler(200, goodResponse))
defer backend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", backend.URL()))
config := ReadConfig("retries")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
attempts := int32(0)
backend.SetHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
incremented := atomic.AddInt32(&attempts, 1)
if incremented != 2 {
w.WriteHeader(500)
return
}
w.Write([]byte(goodResponse))
}))
// test case where request eventually succeeds
res, statusCode, err := client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(goodResponse), res)
require.Equal(t, 2, len(backend.Requests()))
// test case where it does not
backend.Reset()
attempts = -10
res, statusCode, err = client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 503, statusCode)
RequireEqualJSON(t, []byte(noBackendsResponse), res)
require.Equal(t, 4, len(backend.Requests()))
}
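// TestOutOfServiceInterval verifies that a backend which keeps failing is taken
// out of rotation for out_of_service_seconds and is retried again afterwards.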
func TestOutOfServiceInterval(t *testing.T) {
goodBackend := NewMockBackend(SingleResponseHandler(200, goodResponse))
defer goodBackend.Close()
badBackend := NewMockBackend(nil)
defer badBackend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", goodBackend.URL()))
require.NoError(t, os.Setenv("BAD_BACKEND_RPC_URL", badBackend.URL()))
config := ReadConfig("out_of_service_interval")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
okHandler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(goodResponse))
})
badBackend.SetHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(503)
}))
goodBackend.SetHandler(okHandler)
res, statusCode, err := client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(goodResponse), res)
require.Equal(t, 2, len(badBackend.Requests()))
require.Equal(t, 1, len(goodBackend.Requests()))
res, statusCode, err = client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(goodResponse), res)
require.Equal(t, 2, len(badBackend.Requests()))
require.Equal(t, 2, len(goodBackend.Requests()))
res, statusCode, err = client.SendBatchRPC(
NewRPCReq("1", "eth_chainId", nil),
NewRPCReq("1", "eth_chainId", nil),
)
require.NoError(t, err)
require.Equal(t, 2, len(badBackend.Requests()))
require.Equal(t, 4, len(goodBackend.Requests()))
time.Sleep(time.Second)
badBackend.SetHandler(okHandler)
res, statusCode, err = client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
require.Equal(t, 200, statusCode)
RequireEqualJSON(t, []byte(goodResponse), res)
require.Equal(t, 3, len(badBackend.Requests()))
require.Equal(t, 4, len(goodBackend.Requests()))
}
package integration_tests
import (
"bytes"
"context"
"encoding/json"
"github.com/ethereum-optimism/optimism/go/proxyd"
"io/ioutil"
"net/http"
"net/http/httptest"
"sync"
)
type RecordedRequest struct {
Method string
Headers http.Header
Body []byte
}
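// MockBackend wraps an httptest.Server and records every request it receives
// so tests can assert on request counts and payloads.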
type MockBackend struct {
handler http.Handler
server *httptest.Server
mtx sync.RWMutex
requests []*RecordedRequest
}
func SingleResponseHandler(code int, response string) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(code)
w.Write([]byte(response))
}
}
type RPCResponseHandler struct {
mtx sync.RWMutex
rpcResponses map[string]string
}
func NewRPCResponseHandler(rpcResponses map[string]string) *RPCResponseHandler {
return &RPCResponseHandler{
rpcResponses: rpcResponses,
}
}
func (h *RPCResponseHandler) SetResponse(method, response string) {
h.mtx.Lock()
defer h.mtx.Unlock()
h.rpcResponses[method] = response
}
func (h *RPCResponseHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
panic(err)
}
req, err := proxyd.ParseRPCReq(body)
if err != nil {
panic(err)
}
h.mtx.RLock()
res := h.rpcResponses[req.Method]
h.mtx.RUnlock()
if res == "" {
w.WriteHeader(400)
return
}
out := &proxyd.RPCRes{
JSONRPC: proxyd.JSONRPCVersion,
Result: res,
ID: req.ID,
}
enc := json.NewEncoder(w)
if err := enc.Encode(out); err != nil {
panic(err)
}
}
func NewMockBackend(handler http.Handler) *MockBackend {
mb := &MockBackend{
handler: handler,
}
mb.server = httptest.NewServer(http.HandlerFunc(mb.wrappedHandler))
return mb
}
func (m *MockBackend) URL() string {
return m.server.URL
}
func (m *MockBackend) Close() {
m.server.Close()
}
func (m *MockBackend) SetHandler(handler http.Handler) {
m.mtx.Lock()
m.handler = handler
m.mtx.Unlock()
}
func (m *MockBackend) Reset() {
m.mtx.Lock()
m.requests = nil
m.mtx.Unlock()
}
func (m *MockBackend) Requests() []*RecordedRequest {
m.mtx.RLock()
defer m.mtx.RUnlock()
out := make([]*RecordedRequest, len(m.requests))
for i := 0; i < len(m.requests); i++ {
out[i] = m.requests[i]
}
return out
}
func (m *MockBackend) wrappedHandler(w http.ResponseWriter, r *http.Request) {
m.mtx.Lock()
body, err := ioutil.ReadAll(r.Body)
if err != nil {
panic(err)
}
clone := r.Clone(context.Background())
clone.Body = ioutil.NopCloser(bytes.NewReader(body))
m.requests = append(m.requests, &RecordedRequest{
Method: r.Method,
Headers: r.Header.Clone(),
Body: body,
})
m.handler.ServeHTTP(w, clone)
m.mtx.Unlock()
}
package integration_tests
import (
"github.com/ethereum-optimism/optimism/go/proxyd"
"github.com/stretchr/testify/require"
"os"
"testing"
)
type resWithCode struct {
code int
res []byte
}
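// TestMaxRPSLimit fires three concurrent requests at a backend capped at
// max_rps = 2 and expects exactly one of them to be rejected with a 503.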
func TestMaxRPSLimit(t *testing.T) {
goodBackend := NewMockBackend(SingleResponseHandler(200, goodResponse))
defer goodBackend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", goodBackend.URL()))
config := ReadConfig("rate_limit")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
resCh := make(chan *resWithCode)
for i := 0; i < 3; i++ {
go func() {
res, code, err := client.SendRPC("eth_chainId", nil)
require.NoError(t, err)
resCh <- &resWithCode{
code: code,
res: res,
}
}()
}
codes := make(map[int]int)
var limitedRes []byte
for i := 0; i < 3; i++ {
res := <-resCh
code := res.code
if codes[code] == 0 {
codes[code] = 1
} else {
codes[code] += 1
}
// the third request gets a 503 because the only backend is rate limited (max_rps = 2)
if code == 503 {
limitedRes = res.res
}
}
require.Equal(t, 2, codes[200])
require.Equal(t, 1, codes[503])
RequireEqualJSON(t, []byte(noBackendsResponse), limitedRes)
}
[server]
rpc_port = 8545
[backend]
response_timeout_seconds = 1
[redis]
url = "$REDIS_URL"
[cache]
enabled = true
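# block_sync_rpc_url is polled for the latest block number and gas price; the
# $-prefixed values in this file are resolved from environment variables set by the tests.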
block_sync_rpc_url = "$GOOD_BACKEND_RPC_URL"
[backends]
[backends.good]
rpc_url = "$GOOD_BACKEND_RPC_URL"
ws_url = "$GOOD_BACKEND_RPC_URL"
[backend_groups]
[backend_groups.main]
backends = ["good"]
[rpc_method_mappings]
eth_chainId = "main"
net_version = "main"
eth_getBlockByNumber = "main"
eth_blockNumber = "main"
eth_call = "main"
[server]
rpc_port = 8545
[backend]
response_timeout_seconds = 1
[backends]
[backends.good]
rpc_url = "$GOOD_BACKEND_RPC_URL"
ws_url = "$GOOD_BACKEND_RPC_URL"
[backends.bad]
rpc_url = "$BAD_BACKEND_RPC_URL"
ws_url = "$BAD_BACKEND_RPC_URL"
[backend_groups]
[backend_groups.main]
backends = ["bad", "good"]
[rpc_method_mappings]
eth_chainId = "main"
\ No newline at end of file
[server]
rpc_port = 8545
[backend]
response_timeout_seconds = 1
max_retries = 1
out_of_service_seconds = 1
[backends]
[backends.good]
rpc_url = "$GOOD_BACKEND_RPC_URL"
ws_url = "$GOOD_BACKEND_RPC_URL"
[backends.bad]
rpc_url = "$BAD_BACKEND_RPC_URL"
ws_url = "$BAD_BACKEND_RPC_URL"
[backend_groups]
[backend_groups.main]
backends = ["bad", "good"]
[rpc_method_mappings]
eth_chainId = "main"
\ No newline at end of file
[server]
rpc_port = 8545
[backend]
response_timeout_seconds = 1
[backends]
[backends.good]
rpc_url = "$GOOD_BACKEND_RPC_URL"
ws_url = "$GOOD_BACKEND_RPC_URL"
max_rps = 2
[backend_groups]
[backend_groups.main]
backends = ["good"]
[rpc_method_mappings]
eth_chainId = "main"
\ No newline at end of file
[server]
rpc_port = 8545
[backend]
response_timeout_seconds = 1
max_retries = 3
[backends]
[backends.good]
rpc_url = "$GOOD_BACKEND_RPC_URL"
ws_url = "$GOOD_BACKEND_RPC_URL"
[backend_groups]
[backend_groups.main]
backends = ["good"]
[rpc_method_mappings]
eth_chainId = "main"
\ No newline at end of file
[server]
rpc_port = 8545
[backend]
response_timeout_seconds = 1
[backends]
[backends.good]
rpc_url = "$GOOD_BACKEND_RPC_URL"
ws_url = "$GOOD_BACKEND_RPC_URL"
[backend_groups]
[backend_groups.main]
backends = ["good"]
[rpc_method_mappings]
eth_chainId = "main"
\ No newline at end of file
package integration_tests
import (
"bytes"
"encoding/json"
"fmt"
"github.com/BurntSushi/toml"
"github.com/ethereum-optimism/optimism/go/proxyd"
"github.com/stretchr/testify/require"
"io/ioutil"
"net/http"
"testing"
)
type ProxydClient struct {
url string
}
func NewProxydClient(url string) *ProxydClient {
return &ProxydClient{url: url}
}
func (p *ProxydClient) SendRPC(method string, params []interface{}) ([]byte, int, error) {
rpcReq := NewRPCReq("999", method, params)
body, err := json.Marshal(rpcReq)
if err != nil {
panic(err)
}
return p.SendRequest(body)
}
func (p *ProxydClient) SendBatchRPC(reqs ...*proxyd.RPCReq) ([]byte, int, error) {
body, err := json.Marshal(reqs)
if err != nil {
panic(err)
}
return p.SendRequest(body)
}
func (p *ProxydClient) SendRequest(body []byte) ([]byte, int, error) {
res, err := http.Post(p.url, "application/json", bytes.NewReader(body))
if err != nil {
return nil, -1, err
}
defer res.Body.Close()
code := res.StatusCode
resBody, err := ioutil.ReadAll(res.Body)
if err != nil {
panic(err)
}
return resBody, code, nil
}
func RequireEqualJSON(t *testing.T, expected []byte, actual []byte) {
expJSON := canonicalizeJSON(t, expected)
actJSON := canonicalizeJSON(t, actual)
require.Equal(t, string(expJSON), string(actJSON))
}
func canonicalizeJSON(t *testing.T, in []byte) []byte {
var any interface{}
if in[0] == '[' {
any = make([]interface{}, 0)
} else {
any = make(map[string]interface{})
}
err := json.Unmarshal(in, &any)
require.NoError(t, err)
out, err := json.Marshal(any)
require.NoError(t, err)
return out
}
func ReadConfig(name string) *proxyd.Config {
config := new(proxyd.Config)
_, err := toml.DecodeFile(fmt.Sprintf("testdata/%s.toml", name), config)
if err != nil {
panic(err)
}
return config
}
func NewRPCReq(id string, method string, params []interface{}) *proxyd.RPCReq {
jsonParams, err := json.Marshal(params)
if err != nil {
panic(err)
}
return &proxyd.RPCReq{
JSONRPC: proxyd.JSONRPCVersion,
Method: method,
Params: jsonParams,
ID: []byte(id),
}
}
package integration_tests
import (
"github.com/ethereum-optimism/optimism/go/proxyd"
"github.com/stretchr/testify/require"
"os"
"strings"
"testing"
)
const (
notWhitelistedResponse = `{"jsonrpc":"2.0","error":{"code":-32001,"message":"rpc method is not whitelisted"},"id":999}`
parseErrResponse = `{"jsonrpc":"2.0","error":{"code":-32700,"message":"parse error"},"id":null}`
invalidJSONRPCVersionResponse = `{"error":{"code":-32601,"message":"invalid JSON-RPC version"},"id":null,"jsonrpc":"2.0"}`
invalidIDResponse = `{"error":{"code":-32601,"message":"invalid ID"},"id":null,"jsonrpc":"2.0"}`
invalidMethodResponse = `{"error":{"code":-32601,"message":"no method specified"},"id":null,"jsonrpc":"2.0"}`
invalidBatchLenResponse = `{"error":{"code":-32601,"message":"must specify at least one batch call"},"id":null,"jsonrpc":"2.0"}`
)
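// TestSingleRPCValidation sends malformed single RPC requests and checks that
// each is rejected with the expected error before reaching the backend.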
func TestSingleRPCValidation(t *testing.T) {
goodBackend := NewMockBackend(SingleResponseHandler(200, goodResponse))
defer goodBackend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", goodBackend.URL()))
config := ReadConfig("whitelist")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
tests := []struct {
name string
body string
res string
code int
}{
{
"body not JSON",
"this ain't an RPC call",
parseErrResponse,
400,
},
{
"body not RPC",
"{\"not\": \"rpc\"}",
invalidJSONRPCVersionResponse,
400,
},
{
"body missing RPC ID",
"{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23]}",
invalidIDResponse,
400,
},
{
"body has array ID",
"{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": []}",
invalidIDResponse,
400,
},
{
"body has object ID",
"{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": {}}",
invalidIDResponse,
400,
},
{
"bad method",
"{\"jsonrpc\": \"2.0\", \"method\": 7, \"params\": [42, 23], \"id\": 1}",
parseErrResponse,
400,
},
{
"bad JSON-RPC",
"{\"jsonrpc\": \"1.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": 1}",
invalidJSONRPCVersionResponse,
400,
},
{
"omitted method",
"{\"jsonrpc\": \"2.0\", \"params\": [42, 23], \"id\": 1}",
invalidMethodResponse,
400,
},
{
"not whitelisted method",
"{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": 999}",
notWhitelistedResponse,
403,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
res, code, err := client.SendRequest([]byte(tt.body))
require.NoError(t, err)
RequireEqualJSON(t, []byte(tt.res), res)
require.Equal(t, tt.code, code)
require.Equal(t, 0, len(goodBackend.Requests()))
})
}
}
func TestBatchRPCValidation(t *testing.T) {
goodBackend := NewMockBackend(SingleResponseHandler(200, goodResponse))
defer goodBackend.Close()
require.NoError(t, os.Setenv("GOOD_BACKEND_RPC_URL", goodBackend.URL()))
config := ReadConfig("whitelist")
client := NewProxydClient("http://127.0.0.1:8545")
shutdown, err := proxyd.Start(config)
require.NoError(t, err)
defer shutdown()
tests := []struct {
name string
body string
res string
code int
reqCount int
}{
{
"empty batch",
"[]",
invalidBatchLenResponse,
400,
0,
},
{
"bad json",
"[{,]",
parseErrResponse,
400,
0,
},
{
"not object in batch",
"[123]",
asArray(parseErrResponse),
200,
0,
},
{
"body not RPC",
"[{\"not\": \"rpc\"}]",
asArray(invalidJSONRPCVersionResponse),
200,
0,
},
{
"body missing RPC ID",
"[{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23]}]",
asArray(invalidIDResponse),
200,
0,
},
{
"body has array ID",
"[{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": []}]",
asArray(invalidIDResponse),
200,
0,
},
{
"body has object ID",
"[{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": {}}]",
asArray(invalidIDResponse),
200,
0,
},
// This happens because a non-string method value cannot be deserialized,
// which breaks parsing for the whole request.
{
"bad method",
"[{\"error\":{\"code\":-32600,\"message\":\"invalid request\"},\"id\":null,\"jsonrpc\":\"2.0\"}]",
asArray(invalidMethodResponse),
200,
0,
},
{
"bad JSON-RPC",
"[{\"jsonrpc\": \"1.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": 1}]",
asArray(invalidJSONRPCVersionResponse),
200,
0,
},
{
"omitted method",
"[{\"jsonrpc\": \"2.0\", \"params\": [42, 23], \"id\": 1}]",
asArray(invalidMethodResponse),
200,
0,
},
{
"not whitelisted method",
"[{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": 999}]",
asArray(notWhitelistedResponse),
200,
0,
},
{
"mixed",
asArray(
"{\"jsonrpc\": \"2.0\", \"method\": \"subtract\", \"params\": [42, 23], \"id\": 999}",
"{\"jsonrpc\": \"2.0\", \"method\": \"eth_chainId\", \"params\": [], \"id\": 123}",
"123",
),
asArray(
notWhitelistedResponse,
goodResponse,
parseErrResponse,
),
200,
1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
res, code, err := client.SendRequest([]byte(tt.body))
require.NoError(t, err)
RequireEqualJSON(t, []byte(tt.res), res)
require.Equal(t, tt.code, code)
require.Equal(t, tt.reqCount, len(goodBackend.Requests()))
})
}
}
func asArray(in ...string) string {
return "[" + strings.Join(in, ",") + "]"
}
package proxyd
import (
"context"
"sync"
"time"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log"
)
const blockHeadSyncPeriod = 1 * time.Second
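// LatestBlockHead polls the backend for the latest block number once per
// blockHeadSyncPeriod and caches it in memory behind a RWMutex.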
type LatestBlockHead struct {
url string
client *ethclient.Client
quit chan struct{}
done chan struct{}
mutex sync.RWMutex
blockNum uint64
}
func newLatestBlockHead(url string) (*LatestBlockHead, error) {
client, err := ethclient.Dial(url)
if err != nil {
return nil, err
}
return &LatestBlockHead{
url: url,
client: client,
quit: make(chan struct{}),
done: make(chan struct{}),
}, nil
}
func (h *LatestBlockHead) Start() {
go func() {
ticker := time.NewTicker(blockHeadSyncPeriod)
defer ticker.Stop()
for {
select {
case <-ticker.C:
blockNum, err := h.getBlockNum()
if err != nil {
log.Error("error retrieving latest block number", "error", err)
continue
}
log.Trace("polling block number", "blockNum", blockNum)
h.mutex.Lock()
h.blockNum = blockNum
h.mutex.Unlock()
case <-h.quit:
close(h.done)
return
}
}
}()
}
func (h *LatestBlockHead) getBlockNum() (uint64, error) {
const maxRetries = 5
var err error
for i := 0; i <= maxRetries; i++ {
var blockNum uint64
blockNum, err = h.client.BlockNumber(context.Background())
if err != nil {
backoff := calcBackoff(i)
log.Warn("http operation failed. retrying...", "error", err, "backoff", backoff)
time.Sleep(backoff)
continue
}
return blockNum, nil
}
return 0, wrapErr(err, "exceeded retries")
}
func (h *LatestBlockHead) Stop() {
close(h.quit)
<-h.done
h.client.Close()
}
func (h *LatestBlockHead) GetBlockNum() uint64 {
h.mutex.RLock()
defer h.mutex.RUnlock()
return h.blockNum
}
package proxyd
import (
"context"
"time"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log"
)
const cacheSyncRate = 1 * time.Second
type lvcUpdateFn func(context.Context, *ethclient.Client) (string, error)
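// EthLastValueCache periodically invokes an updater against an Ethereum client
// and writes the latest value into the cache under a fixed key.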
type EthLastValueCache struct {
client *ethclient.Client
cache Cache
key string
updater lvcUpdateFn
quit chan struct{}
}
func newLVC(client *ethclient.Client, cache Cache, cacheKey string, updater lvcUpdateFn) *EthLastValueCache {
return &EthLastValueCache{
client: client,
cache: cache,
key: cacheKey,
updater: updater,
quit: make(chan struct{}),
}
}
func (h *EthLastValueCache) Start() {
go func() {
ticker := time.NewTicker(cacheSyncRate)
defer ticker.Stop()
for {
select {
case <-ticker.C:
lvcPollTimeGauge.WithLabelValues(h.key).SetToCurrentTime()
value, err := h.getUpdate()
if err != nil {
log.Error("error retrieving latest value", "key", h.key, "error", err)
continue
}
log.Trace("polling latest value", "value", value)
if err := h.cache.Put(context.Background(), h.key, value); err != nil {
log.Error("error writing last value to cache", "key", h.key, "error", err)
}
case <-h.quit:
return
}
}
}()
}
func (h *EthLastValueCache) getUpdate() (string, error) {
const maxRetries = 5
var err error
for i := 0; i <= maxRetries; i++ {
var value string
value, err = h.updater(context.Background(), h.client)
if err != nil {
backoff := calcBackoff(i)
log.Warn("http operation failed. retrying...", "error", err, "backoff", backoff)
lvcErrorsTotal.WithLabelValues(h.key).Inc()
time.Sleep(backoff)
continue
}
return value, nil
}
return "", wrapErr(err, "exceeded retries")
}
func (h *EthLastValueCache) Stop() {
close(h.quit)
}
func (h *EthLastValueCache) Read(ctx context.Context) (string, error) {
return h.cache.Get(ctx, h.key)
}
...@@ -5,36 +5,56 @@ import ( ...@@ -5,36 +5,56 @@ import (
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
"sync"
"github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/common/hexutil"
) )
var errInvalidRPCParams = errors.New("invalid RPC params") var (
errInvalidRPCParams = errors.New("invalid RPC params")
)
type RPCMethodHandler interface { type RPCMethodHandler interface {
CacheKey(req *RPCReq) string GetRPCMethod(context.Context, *RPCReq) (*RPCRes, error)
IsCacheable(req *RPCReq) (bool, error) PutRPCMethod(context.Context, *RPCReq, *RPCRes) error
RequiresUnconfirmedBlocks(ctx context.Context, req *RPCReq) (bool, error)
} }
type StaticRPCMethodHandler struct { type StaticMethodHandler struct {
method string cache interface{}
m sync.RWMutex
} }
func (s *StaticRPCMethodHandler) CacheKey(req *RPCReq) string { func (e *StaticMethodHandler) GetRPCMethod(ctx context.Context, req *RPCReq) (*RPCRes, error) {
return fmt.Sprintf("method:%s", s.method) e.m.RLock()
cache := e.cache
e.m.RUnlock()
if cache == nil {
return nil, nil
}
return &RPCRes{
JSONRPC: req.JSONRPC,
Result: cache,
ID: req.ID,
}, nil
} }
func (s *StaticRPCMethodHandler) IsCacheable(*RPCReq) (bool, error) { return true, nil } func (e *StaticMethodHandler) PutRPCMethod(ctx context.Context, req *RPCReq, res *RPCRes) error {
func (s *StaticRPCMethodHandler) RequiresUnconfirmedBlocks(context.Context, *RPCReq) (bool, error) { e.m.Lock()
return false, nil if e.cache == nil {
e.cache = res.Result
}
e.m.Unlock()
return nil
} }
type EthGetBlockByNumberMethod struct { type EthGetBlockByNumberMethodHandler struct {
getLatestBlockNumFn GetLatestBlockNumFn cache Cache
getLatestBlockNumFn GetLatestBlockNumFn
numBlockConfirmations int
} }
func (e *EthGetBlockByNumberMethod) CacheKey(req *RPCReq) string { func (e *EthGetBlockByNumberMethodHandler) cacheKey(req *RPCReq) string {
input, includeTx, err := decodeGetBlockByNumberParams(req.Params) input, includeTx, err := decodeGetBlockByNumberParams(req.Params)
if err != nil { if err != nil {
return "" return ""
...@@ -42,7 +62,7 @@ func (e *EthGetBlockByNumberMethod) CacheKey(req *RPCReq) string { ...@@ -42,7 +62,7 @@ func (e *EthGetBlockByNumberMethod) CacheKey(req *RPCReq) string {
return fmt.Sprintf("method:eth_getBlockByNumber:%s:%t", input, includeTx) return fmt.Sprintf("method:eth_getBlockByNumber:%s:%t", input, includeTx)
} }
func (e *EthGetBlockByNumberMethod) IsCacheable(req *RPCReq) (bool, error) { func (e *EthGetBlockByNumberMethodHandler) cacheable(req *RPCReq) (bool, error) {
blockNum, _, err := decodeGetBlockByNumberParams(req.Params) blockNum, _, err := decodeGetBlockByNumberParams(req.Params)
if err != nil { if err != nil {
return false, err return false, err
...@@ -50,33 +70,51 @@ func (e *EthGetBlockByNumberMethod) IsCacheable(req *RPCReq) (bool, error) { ...@@ -50,33 +70,51 @@ func (e *EthGetBlockByNumberMethod) IsCacheable(req *RPCReq) (bool, error) {
return !isBlockDependentParam(blockNum), nil return !isBlockDependentParam(blockNum), nil
} }
func (e *EthGetBlockByNumberMethod) RequiresUnconfirmedBlocks(ctx context.Context, req *RPCReq) (bool, error) { func (e *EthGetBlockByNumberMethodHandler) GetRPCMethod(ctx context.Context, req *RPCReq) (*RPCRes, error) {
curBlock, err := e.getLatestBlockNumFn(ctx) if ok, err := e.cacheable(req); !ok || err != nil {
if err != nil { return nil, err
return false, err }
key := e.cacheKey(req)
return getImmutableRPCResponse(ctx, e.cache, key, req)
}
func (e *EthGetBlockByNumberMethodHandler) PutRPCMethod(ctx context.Context, req *RPCReq, res *RPCRes) error {
if ok, err := e.cacheable(req); !ok || err != nil {
return err
} }
blockInput, _, err := decodeGetBlockByNumberParams(req.Params) blockInput, _, err := decodeGetBlockByNumberParams(req.Params)
if err != nil { if err != nil {
return false, err return err
} }
if isBlockDependentParam(blockInput) { if isBlockDependentParam(blockInput) {
return true, nil return nil
} }
if blockInput == "earliest" { if blockInput != "earliest" {
return false, nil curBlock, err := e.getLatestBlockNumFn(ctx)
} if err != nil {
blockNum, err := decodeBlockInput(blockInput) return err
if err != nil { }
return false, err blockNum, err := decodeBlockInput(blockInput)
if err != nil {
return err
}
if curBlock <= blockNum+uint64(e.numBlockConfirmations) {
return nil
}
} }
return curBlock <= blockNum+numBlockConfirmations, nil
key := e.cacheKey(req)
return putImmutableRPCResponse(ctx, e.cache, key, req, res)
} }
type EthGetBlockRangeMethod struct { type EthGetBlockRangeMethodHandler struct {
getLatestBlockNumFn GetLatestBlockNumFn cache Cache
getLatestBlockNumFn GetLatestBlockNumFn
numBlockConfirmations int
} }
func (e *EthGetBlockRangeMethod) CacheKey(req *RPCReq) string { func (e *EthGetBlockRangeMethodHandler) cacheKey(req *RPCReq) string {
start, end, includeTx, err := decodeGetBlockRangeParams(req.Params) start, end, includeTx, err := decodeGetBlockRangeParams(req.Params)
if err != nil { if err != nil {
return "" return ""
...@@ -84,7 +122,7 @@ func (e *EthGetBlockRangeMethod) CacheKey(req *RPCReq) string { ...@@ -84,7 +122,7 @@ func (e *EthGetBlockRangeMethod) CacheKey(req *RPCReq) string {
return fmt.Sprintf("method:eth_getBlockRange:%s:%s:%t", start, end, includeTx) return fmt.Sprintf("method:eth_getBlockRange:%s:%s:%t", start, end, includeTx)
} }
func (e *EthGetBlockRangeMethod) IsCacheable(req *RPCReq) (bool, error) { func (e *EthGetBlockRangeMethodHandler) cacheable(req *RPCReq) (bool, error) {
start, end, _, err := decodeGetBlockRangeParams(req.Params) start, end, _, err := decodeGetBlockRangeParams(req.Params)
if err != nil { if err != nil {
return false, err return false, err
...@@ -92,42 +130,144 @@ func (e *EthGetBlockRangeMethod) IsCacheable(req *RPCReq) (bool, error) { ...@@ -92,42 +130,144 @@ func (e *EthGetBlockRangeMethod) IsCacheable(req *RPCReq) (bool, error) {
return !isBlockDependentParam(start) && !isBlockDependentParam(end), nil return !isBlockDependentParam(start) && !isBlockDependentParam(end), nil
} }
func (e *EthGetBlockRangeMethod) RequiresUnconfirmedBlocks(ctx context.Context, req *RPCReq) (bool, error) { func (e *EthGetBlockRangeMethodHandler) GetRPCMethod(ctx context.Context, req *RPCReq) (*RPCRes, error) {
curBlock, err := e.getLatestBlockNumFn(ctx) if ok, err := e.cacheable(req); !ok || err != nil {
if err != nil { return nil, err
return false, err }
key := e.cacheKey(req)
return getImmutableRPCResponse(ctx, e.cache, key, req)
}
func (e *EthGetBlockRangeMethodHandler) PutRPCMethod(ctx context.Context, req *RPCReq, res *RPCRes) error {
if ok, err := e.cacheable(req); !ok || err != nil {
return err
} }
start, end, _, err := decodeGetBlockRangeParams(req.Params) start, end, _, err := decodeGetBlockRangeParams(req.Params)
if err != nil { if err != nil {
return false, err return err
}
if isBlockDependentParam(start) || isBlockDependentParam(end) {
return true, nil
} }
if start == "earliest" && end == "earliest" { curBlock, err := e.getLatestBlockNumFn(ctx)
return false, nil if err != nil {
return err
} }
if start != "earliest" { if start != "earliest" {
startNum, err := decodeBlockInput(start) startNum, err := decodeBlockInput(start)
if err != nil { if err != nil {
return false, err return err
} }
if curBlock <= startNum+numBlockConfirmations { if curBlock <= startNum+uint64(e.numBlockConfirmations) {
return true, nil return nil
} }
} }
if end != "earliest" { if end != "earliest" {
endNum, err := decodeBlockInput(end) endNum, err := decodeBlockInput(end)
if err != nil { if err != nil {
return false, err return err
} }
if curBlock <= endNum+numBlockConfirmations { if curBlock <= endNum+uint64(e.numBlockConfirmations) {
return true, nil return nil
} }
} }
return false, nil
key := e.cacheKey(req)
return putImmutableRPCResponse(ctx, e.cache, key, req, res)
}
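// EthCallMethodHandler caches eth_call responses for simple read-only calls
// (no from, gas, or non-zero value) made against a sufficiently confirmed block.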
type EthCallMethodHandler struct {
cache Cache
getLatestBlockNumFn GetLatestBlockNumFn
numBlockConfirmations int
}
func (e *EthCallMethodHandler) cacheable(params *ethCallParams, blockTag string) bool {
if isBlockDependentParam(blockTag) {
return false
}
if params.From != "" || params.Gas != "" {
return false
}
if params.Value != "" && params.Value != "0x0" {
return false
}
return true
}
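// cacheKey derives the cache key from the call target, calldata, and block tag,
// e.g. "method:eth_call:<to>:<data>:<blockTag>".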
func (e *EthCallMethodHandler) cacheKey(params *ethCallParams, blockTag string) string {
keyParams := fmt.Sprintf("%s:%s:%s", params.To, params.Data, blockTag)
return fmt.Sprintf("method:eth_call:%s", keyParams)
}
func (e *EthCallMethodHandler) GetRPCMethod(ctx context.Context, req *RPCReq) (*RPCRes, error) {
params, blockTag, err := decodeEthCallParams(req)
if err != nil {
return nil, err
}
if !e.cacheable(params, blockTag) {
return nil, nil
}
key := e.cacheKey(params, blockTag)
return getImmutableRPCResponse(ctx, e.cache, key, req)
}
func (e *EthCallMethodHandler) PutRPCMethod(ctx context.Context, req *RPCReq, res *RPCRes) error {
params, blockTag, err := decodeEthCallParams(req)
if err != nil {
return err
}
if !e.cacheable(params, blockTag) {
return nil
}
if blockTag != "earliest" {
curBlock, err := e.getLatestBlockNumFn(ctx)
if err != nil {
return err
}
blockNum, err := decodeBlockInput(blockTag)
if err != nil {
return err
}
if curBlock <= blockNum+uint64(e.numBlockConfirmations) {
return nil
}
}
key := e.cacheKey(params, blockTag)
return putImmutableRPCResponse(ctx, e.cache, key, req, res)
}
type EthBlockNumberMethodHandler struct {
getLatestBlockNumFn GetLatestBlockNumFn
}
func (e *EthBlockNumberMethodHandler) GetRPCMethod(ctx context.Context, req *RPCReq) (*RPCRes, error) {
blockNum, err := e.getLatestBlockNumFn(ctx)
if err != nil {
return nil, err
}
return makeRPCRes(req, hexutil.EncodeUint64(blockNum)), nil
}
func (e *EthBlockNumberMethodHandler) PutRPCMethod(context.Context, *RPCReq, *RPCRes) error {
return nil
}
type EthGasPriceMethodHandler struct {
getLatestGasPrice GetLatestGasPriceFn
}
func (e *EthGasPriceMethodHandler) GetRPCMethod(ctx context.Context, req *RPCReq) (*RPCRes, error) {
gasPrice, err := e.getLatestGasPrice(ctx)
if err != nil {
return nil, err
}
return makeRPCRes(req, hexutil.EncodeUint64(gasPrice)), nil
}
func (e *EthGasPriceMethodHandler) PutRPCMethod(context.Context, *RPCReq, *RPCRes) error {
return nil
} }
func isBlockDependentParam(s string) bool { func isBlockDependentParam(s string) bool {
...@@ -186,6 +326,34 @@ func decodeBlockInput(input string) (uint64, error) { ...@@ -186,6 +326,34 @@ func decodeBlockInput(input string) (uint64, error) {
return hexutil.DecodeUint64(input) return hexutil.DecodeUint64(input)
} }
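// ethCallParams mirrors the transaction object passed as the first positional
// parameter of eth_call.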
type ethCallParams struct {
From string `json:"from"`
To string `json:"to"`
Gas string `json:"gas"`
GasPrice string `json:"gasPrice"`
Value string `json:"value"`
Data string `json:"data"`
}
func decodeEthCallParams(req *RPCReq) (*ethCallParams, string, error) {
var input []json.RawMessage
if err := json.Unmarshal(req.Params, &input); err != nil {
return nil, "", err
}
if len(input) != 2 {
return nil, "", fmt.Errorf("invalid eth_call parameters")
}
params := new(ethCallParams)
if err := json.Unmarshal(input[0], params); err != nil {
return nil, "", err
}
var blockTag string
if err := json.Unmarshal(input[1], &blockTag); err != nil {
return nil, "", err
}
return params, blockTag, nil
}
func validBlockInput(input string) bool { func validBlockInput(input string) bool {
if input == "earliest" || input == "pending" || input == "latest" { if input == "earliest" || input == "pending" || input == "latest" {
return true return true
...@@ -193,3 +361,39 @@ func validBlockInput(input string) bool { ...@@ -193,3 +361,39 @@ func validBlockInput(input string) bool {
_, err := decodeBlockInput(input) _, err := decodeBlockInput(input)
return err == nil return err == nil
} }
func makeRPCRes(req *RPCReq, result interface{}) *RPCRes {
return &RPCRes{
JSONRPC: JSONRPCVersion,
ID: req.ID,
Result: result,
}
}
func getImmutableRPCResponse(ctx context.Context, cache Cache, key string, req *RPCReq) (*RPCRes, error) {
val, err := cache.Get(ctx, key)
if err != nil {
return nil, err
}
if val == "" {
return nil, nil
}
var result interface{}
if err := json.Unmarshal([]byte(val), &result); err != nil {
return nil, err
}
return &RPCRes{
JSONRPC: req.JSONRPC,
Result: result,
ID: req.ID,
}, nil
}
func putImmutableRPCResponse(ctx context.Context, cache Cache, key string, req *RPCReq, res *RPCRes) error {
if key == "" {
return nil
}
val := mustMarshalJSON(res.Result)
return cache.Put(ctx, key, string(val))
}
...@@ -145,22 +145,53 @@ var ( ...@@ -145,22 +145,53 @@ var (
requestPayloadSizesGauge = promauto.NewHistogramVec(prometheus.HistogramOpts{ requestPayloadSizesGauge = promauto.NewHistogramVec(prometheus.HistogramOpts{
Namespace: MetricsNamespace, Namespace: MetricsNamespace,
Name: "request_payload_sizes", Name: "request_payload_sizes",
Help: "Gauge of client request payload sizes.", Help: "Histogram of client request payload sizes.",
Buckets: PayloadSizeBuckets, Buckets: PayloadSizeBuckets,
}, []string{ }, []string{
"auth", "auth",
"method_name",
}) })
responsePayloadSizesGauge = promauto.NewHistogramVec(prometheus.HistogramOpts{ responsePayloadSizesGauge = promauto.NewHistogramVec(prometheus.HistogramOpts{
Namespace: MetricsNamespace, Namespace: MetricsNamespace,
Name: "response_payload_sizes", Name: "response_payload_sizes",
Help: "Gauge of client response payload sizes.", Help: "Histogram of client response payload sizes.",
Buckets: PayloadSizeBuckets, Buckets: PayloadSizeBuckets,
}, []string{ }, []string{
"auth", "auth",
}) })
cacheHitsTotal = promauto.NewCounterVec(prometheus.CounterOpts{
Namespace: MetricsNamespace,
Name: "cache_hits_total",
Help: "Number of cache hits.",
}, []string{
"method",
})
cacheMissesTotal = promauto.NewCounterVec(prometheus.CounterOpts{
Namespace: MetricsNamespace,
Name: "cache_misses_total",
Help: "Number of cache misses.",
}, []string{
"method",
})
lvcErrorsTotal = promauto.NewCounterVec(prometheus.CounterOpts{
Namespace: MetricsNamespace,
Name: "lvc_errors_total",
Help: "Count of lvc errors.",
}, []string{
"key",
})
lvcPollTimeGauge = promauto.NewGaugeVec(prometheus.GaugeOpts{
Namespace: MetricsNamespace,
Name: "lvc_poll_time_gauge",
Help: "Gauge of lvc poll time.",
}, []string{
"key",
})
rpcSpecialErrors = []string{ rpcSpecialErrors = []string{
"nonce too low", "nonce too low",
"gas price too high", "gas price too high",
...@@ -208,10 +239,18 @@ func MaybeRecordSpecialRPCError(ctx context.Context, backendName, method string, ...@@ -208,10 +239,18 @@ func MaybeRecordSpecialRPCError(ctx context.Context, backendName, method string,
} }
} }
func RecordRequestPayloadSize(ctx context.Context, method string, payloadSize int) { func RecordRequestPayloadSize(ctx context.Context, payloadSize int) {
requestPayloadSizesGauge.WithLabelValues(GetAuthCtx(ctx), method).Observe(float64(payloadSize)) requestPayloadSizesGauge.WithLabelValues(GetAuthCtx(ctx)).Observe(float64(payloadSize))
} }
func RecordResponsePayloadSize(ctx context.Context, payloadSize int) { func RecordResponsePayloadSize(ctx context.Context, payloadSize int) {
responsePayloadSizesGauge.WithLabelValues(GetAuthCtx(ctx)).Observe(float64(payloadSize)) responsePayloadSizesGauge.WithLabelValues(GetAuthCtx(ctx)).Observe(float64(payloadSize))
} }
func RecordCacheHit(method string) {
cacheHitsTotal.WithLabelValues(method).Inc()
}
func RecordCacheMiss(method string) {
cacheMissesTotal.WithLabelValues(method).Inc()
}
...@@ -7,40 +7,49 @@ import ( ...@@ -7,40 +7,49 @@ import (
"fmt" "fmt"
"net/http" "net/http"
"os" "os"
"os/signal" "strconv"
"syscall"
"time" "time"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
"github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/client_golang/prometheus/promhttp"
) )
func Start(config *Config) error { func Start(config *Config) (func(), error) {
if len(config.Backends) == 0 { if len(config.Backends) == 0 {
return errors.New("must define at least one backend") return nil, errors.New("must define at least one backend")
} }
if len(config.BackendGroups) == 0 { if len(config.BackendGroups) == 0 {
return errors.New("must define at least one backend group") return nil, errors.New("must define at least one backend group")
} }
if len(config.RPCMethodMappings) == 0 { if len(config.RPCMethodMappings) == 0 {
return errors.New("must define at least one RPC method mapping") return nil, errors.New("must define at least one RPC method mapping")
} }
for authKey := range config.Authentication { for authKey := range config.Authentication {
if authKey == "none" { if authKey == "none" {
return errors.New("cannot use none as an auth key") return nil, errors.New("cannot use none as an auth key")
} }
} }
var redisURL string
if config.Redis.URL != "" {
rURL, err := ReadFromEnvOrConfig(config.Redis.URL)
if err != nil {
return nil, err
}
redisURL = rURL
}
var lim RateLimiter var lim RateLimiter
var err error var err error
if config.Redis == nil { if redisURL == "" {
log.Warn("redis is not configured, using local rate limiter") log.Warn("redis is not configured, using local rate limiter")
lim = NewLocalRateLimiter() lim = NewLocalRateLimiter()
} else { } else {
lim, err = NewRedisRateLimiter(config.Redis.URL) lim, err = NewRedisRateLimiter(redisURL)
if err != nil { if err != nil {
return err return nil, err
} }
} }
...@@ -51,17 +60,17 @@ func Start(config *Config) error { ...@@ -51,17 +60,17 @@ func Start(config *Config) error {
rpcURL, err := ReadFromEnvOrConfig(cfg.RPCURL) rpcURL, err := ReadFromEnvOrConfig(cfg.RPCURL)
if err != nil { if err != nil {
return err return nil, err
} }
wsURL, err := ReadFromEnvOrConfig(cfg.WSURL) wsURL, err := ReadFromEnvOrConfig(cfg.WSURL)
if err != nil { if err != nil {
return err return nil, err
} }
if rpcURL == "" { if rpcURL == "" {
return fmt.Errorf("must define an RPC URL for backend %s", name) return nil, fmt.Errorf("must define an RPC URL for backend %s", name)
} }
if wsURL == "" { if wsURL == "" {
return fmt.Errorf("must define a WS URL for backend %s", name) return nil, fmt.Errorf("must define a WS URL for backend %s", name)
} }
if config.BackendOptions.ResponseTimeoutSeconds != 0 { if config.BackendOptions.ResponseTimeoutSeconds != 0 {
...@@ -86,13 +95,13 @@ func Start(config *Config) error { ...@@ -86,13 +95,13 @@ func Start(config *Config) error {
if cfg.Password != "" { if cfg.Password != "" {
passwordVal, err := ReadFromEnvOrConfig(cfg.Password) passwordVal, err := ReadFromEnvOrConfig(cfg.Password)
if err != nil { if err != nil {
return err return nil, err
} }
opts = append(opts, WithBasicAuth(cfg.Username, passwordVal)) opts = append(opts, WithBasicAuth(cfg.Username, passwordVal))
} }
tlsConfig, err := configureBackendTLS(cfg) tlsConfig, err := configureBackendTLS(cfg)
if err != nil { if err != nil {
return err return nil, err
} }
if tlsConfig != nil { if tlsConfig != nil {
log.Info("using custom TLS config for backend", "name", name) log.Info("using custom TLS config for backend", "name", name)
...@@ -113,7 +122,7 @@ func Start(config *Config) error { ...@@ -113,7 +122,7 @@ func Start(config *Config) error {
backends := make([]*Backend, 0) backends := make([]*Backend, 0)
for _, bName := range bg.Backends { for _, bName := range bg.Backends {
if backendsByName[bName] == nil { if backendsByName[bName] == nil {
return fmt.Errorf("backend %s is not defined", bName) return nil, fmt.Errorf("backend %s is not defined", bName)
} }
backends = append(backends, backendsByName[bName]) backends = append(backends, backendsByName[bName])
} }
...@@ -128,17 +137,17 @@ func Start(config *Config) error { ...@@ -128,17 +137,17 @@ func Start(config *Config) error {
if config.WSBackendGroup != "" { if config.WSBackendGroup != "" {
wsBackendGroup = backendGroups[config.WSBackendGroup] wsBackendGroup = backendGroups[config.WSBackendGroup]
if wsBackendGroup == nil { if wsBackendGroup == nil {
return fmt.Errorf("ws backend group %s does not exist", config.WSBackendGroup) return nil, fmt.Errorf("ws backend group %s does not exist", config.WSBackendGroup)
} }
} }
if wsBackendGroup == nil && config.Server.WSPort != 0 { if wsBackendGroup == nil && config.Server.WSPort != 0 {
return fmt.Errorf("a ws port was defined, but no ws group was defined") return nil, fmt.Errorf("a ws port was defined, but no ws group was defined")
} }
for _, bg := range config.RPCMethodMappings { for _, bg := range config.RPCMethodMappings {
if backendGroups[bg] == nil { if backendGroups[bg] == nil {
return fmt.Errorf("undefined backend group %s", bg) return nil, fmt.Errorf("undefined backend group %s", bg)
} }
} }
...@@ -149,39 +158,50 @@ func Start(config *Config) error { ...@@ -149,39 +158,50 @@ func Start(config *Config) error {
for secret, alias := range config.Authentication { for secret, alias := range config.Authentication {
resolvedSecret, err := ReadFromEnvOrConfig(secret) resolvedSecret, err := ReadFromEnvOrConfig(secret)
if err != nil { if err != nil {
return err return nil, err
} }
resolvedAuth[resolvedSecret] = alias resolvedAuth[resolvedSecret] = alias
} }
} }
var rpcCache RPCCache var (
if config.Cache != nil && config.Cache.Enabled { rpcCache RPCCache
var cache Cache blockNumLVC *EthLastValueCache
if config.Redis != nil { gasPriceLVC *EthLastValueCache
if cache, err = newRedisCache(config.Redis.URL); err != nil { )
return err if config.Cache.Enabled {
var (
cache Cache
blockNumFn GetLatestBlockNumFn
gasPriceFn GetLatestGasPriceFn
)
if config.Cache.BlockSyncRPCURL == "" {
return nil, fmt.Errorf("block sync node required for caching")
}
blockSyncRPCURL, err := ReadFromEnvOrConfig(config.Cache.BlockSyncRPCURL)
if err != nil {
return nil, err
}
if redisURL != "" {
if cache, err = newRedisCache(redisURL); err != nil {
return nil, err
} }
} else { } else {
log.Warn("redis is not configured, using in-memory cache") log.Warn("redis is not configured, using in-memory cache")
cache = newMemoryCache() cache = newMemoryCache()
} }
// Ideally, the BlockSyncRPCURL should be the sequencer or an HA replica that's not far behind.
var getLatestBlockNumFn GetLatestBlockNumFn ethClient, err := ethclient.Dial(blockSyncRPCURL)
if config.Cache.BlockSyncRPCURL == "" {
return fmt.Errorf("block sync node required for caching")
}
latestHead, err := newLatestBlockHead(config.Cache.BlockSyncRPCURL)
if err != nil { if err != nil {
return err return nil, err
} }
latestHead.Start() defer ethClient.Close()
defer latestHead.Stop()
getLatestBlockNumFn = func(ctx context.Context) (uint64, error) { blockNumLVC, blockNumFn = makeGetLatestBlockNumFn(ethClient, cache)
return latestHead.GetBlockNum(), nil gasPriceLVC, gasPriceFn = makeGetLatestGasPriceFn(ethClient, cache)
} rpcCache = newRPCCache(newCacheWithCompression(cache), blockNumFn, gasPriceFn, config.Cache.NumBlockConfirmations)
rpcCache = newRPCCache(cache, getLatestBlockNumFn)
} }
srv := NewServer( srv := NewServer(
...@@ -194,12 +214,17 @@ func Start(config *Config) error { ...@@ -194,12 +214,17 @@ func Start(config *Config) error {
rpcCache, rpcCache,
) )
if config.Metrics != nil && config.Metrics.Enabled { if config.Metrics.Enabled {
addr := fmt.Sprintf("%s:%d", config.Metrics.Host, config.Metrics.Port) addr := fmt.Sprintf("%s:%d", config.Metrics.Host, config.Metrics.Port)
log.Info("starting metrics server", "addr", addr) log.Info("starting metrics server", "addr", addr)
go http.ListenAndServe(addr, promhttp.Handler()) go http.ListenAndServe(addr, promhttp.Handler())
} }
// To allow integration tests to come up cleanly, wait 10ms so the
// goroutines below have time to surface an error when creating their servers.
errTimer := time.NewTimer(10 * time.Millisecond)
if config.Server.RPCPort != 0 { if config.Server.RPCPort != 0 {
go func() { go func() {
if err := srv.RPCListenAndServe(config.Server.RPCHost, config.Server.RPCPort); err != nil { if err := srv.RPCListenAndServe(config.Server.RPCHost, config.Server.RPCPort); err != nil {
...@@ -224,15 +249,23 @@ func Start(config *Config) error { ...@@ -224,15 +249,23 @@ func Start(config *Config) error {
}() }()
} }
sig := make(chan os.Signal, 1) <-errTimer.C
signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM) log.Info("started proxyd")
recvSig := <-sig
log.Info("caught signal, shutting down", "signal", recvSig) return func() {
srv.Shutdown() log.Info("shutting down proxyd")
if err := lim.FlushBackendWSConns(backendNames); err != nil { if blockNumLVC != nil {
log.Error("error flushing backend ws conns", "err", err) blockNumLVC.Stop()
} }
return nil if gasPriceLVC != nil {
gasPriceLVC.Stop()
}
srv.Shutdown()
if err := lim.FlushBackendWSConns(backendNames); err != nil {
log.Error("error flushing backend ws conns", "err", err)
}
log.Info("goodbye")
}, nil
} }
func secondsToDuration(seconds int) time.Duration { func secondsToDuration(seconds int) time.Duration {
...@@ -259,3 +292,39 @@ func configureBackendTLS(cfg *BackendConfig) (*tls.Config, error) { ...@@ -259,3 +292,39 @@ func configureBackendTLS(cfg *BackendConfig) (*tls.Config, error) {
return tlsConfig, nil return tlsConfig, nil
} }
func makeUint64LastValueFn(client *ethclient.Client, cache Cache, key string, updater lvcUpdateFn) (*EthLastValueCache, func(context.Context) (uint64, error)) {
lvc := newLVC(client, cache, key, updater)
lvc.Start()
return lvc, func(ctx context.Context) (uint64, error) {
value, err := lvc.Read(ctx)
if err != nil {
return 0, err
}
if value == "" {
return 0, fmt.Errorf("%s is unavailable", key)
}
valueUint, err := strconv.ParseUint(value, 10, 64)
if err != nil {
return 0, err
}
return valueUint, nil
}
}
func makeGetLatestBlockNumFn(client *ethclient.Client, cache Cache) (*EthLastValueCache, GetLatestBlockNumFn) {
return makeUint64LastValueFn(client, cache, "lvc:block_number", func(ctx context.Context, c *ethclient.Client) (string, error) {
blockNum, err := c.BlockNumber(ctx)
return strconv.FormatUint(blockNum, 10), err
})
}
func makeGetLatestGasPriceFn(client *ethclient.Client, cache Cache) (*EthLastValueCache, GetLatestGasPriceFn) {
return makeUint64LastValueFn(client, cache, "lvc:gas_price", func(ctx context.Context, c *ethclient.Client) (string, error) {
gasPrice, err := c.SuggestGasPrice(ctx)
if err != nil {
return "", err
}
return gasPrice.String(), nil
})
}
...@@ -15,16 +15,46 @@ type RPCReq struct { ...@@ -15,16 +15,46 @@ type RPCReq struct {
} }
type RPCRes struct { type RPCRes struct {
JSONRPC string
Result interface{}
Error *RPCErr
ID json.RawMessage
}
type rpcResJSON struct {
JSONRPC string `json:"jsonrpc"` JSONRPC string `json:"jsonrpc"`
Result interface{} `json:"result,omitempty"` Result interface{} `json:"result,omitempty"`
Error *RPCErr `json:"error,omitempty"` Error *RPCErr `json:"error,omitempty"`
ID json.RawMessage `json:"id"` ID json.RawMessage `json:"id"`
} }
type nullResultRPCRes struct {
JSONRPC string `json:"jsonrpc"`
Result interface{} `json:"result"`
ID json.RawMessage `json:"id"`
}
func (r *RPCRes) IsError() bool { func (r *RPCRes) IsError() bool {
return r.Error != nil return r.Error != nil
} }
func (r *RPCRes) MarshalJSON() ([]byte, error) {
if r.Result == nil && r.Error == nil {
return json.Marshal(&nullResultRPCRes{
JSONRPC: r.JSONRPC,
Result: nil,
ID: r.ID,
})
}
return json.Marshal(&rpcResJSON{
JSONRPC: r.JSONRPC,
Result: r.Result,
Error: r.Error,
ID: r.ID,
})
}
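The custom MarshalJSON exists because rpcResJSON tags the result with omitempty: a call that legitimately returns JSON null would otherwise lose its result key entirely, which strict JSON-RPC clients reject. nullResultRPCRes forces an explicit "result": null whenever neither a result nor an error is set. A standalone sketch of the underlying encoding/json behavior (the struct names here are illustrative only, not proxyd's):

// Why the dedicated null-result struct is needed: with `omitempty` a nil result
// silently disappears, while a struct without the tag keeps "result": null.
package main

import (
    "encoding/json"
    "fmt"
)

type withOmitEmpty struct {
    JSONRPC string      `json:"jsonrpc"`
    Result  interface{} `json:"result,omitempty"`
    ID      int         `json:"id"`
}

type withNullResult struct {
    JSONRPC string      `json:"jsonrpc"`
    Result  interface{} `json:"result"`
    ID      int         `json:"id"`
}

func main() {
    a, _ := json.Marshal(withOmitEmpty{JSONRPC: "2.0", ID: 1})
    b, _ := json.Marshal(withNullResult{JSONRPC: "2.0", ID: 1})
    fmt.Println(string(a)) // {"jsonrpc":"2.0","id":1} — the result key is dropped
    fmt.Println(string(b)) // {"jsonrpc":"2.0","result":null,"id":1}
}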
type RPCErr struct { type RPCErr struct {
Code int `json:"code"` Code int `json:"code"`
Message string `json:"message"` Message string `json:"message"`
...@@ -46,30 +76,22 @@ func IsValidID(id json.RawMessage) bool { ...@@ -46,30 +76,22 @@ func IsValidID(id json.RawMessage) bool {
return len(id) > 0 && id[0] != '{' && id[0] != '[' return len(id) > 0 && id[0] != '{' && id[0] != '['
} }
func ParseRPCReq(r io.Reader) (*RPCReq, error) { func ParseRPCReq(body []byte) (*RPCReq, error) {
body, err := ioutil.ReadAll(r)
if err != nil {
return nil, wrapErr(err, "error reading request body")
}
req := new(RPCReq) req := new(RPCReq)
if err := json.Unmarshal(body, req); err != nil { if err := json.Unmarshal(body, req); err != nil {
return nil, ErrParseErr return nil, ErrParseErr
} }
if req.JSONRPC != JSONRPCVersion { return req, nil
return nil, ErrInvalidRequest("invalid JSON-RPC version") }
}
if req.Method == "" {
return nil, ErrInvalidRequest("no method specified")
}
if !IsValidID(req.ID) { func ParseBatchRPCReq(body []byte) ([]json.RawMessage, error) {
return nil, ErrInvalidRequest("invalid ID") batch := make([]json.RawMessage, 0)
if err := json.Unmarshal(body, &batch); err != nil {
return nil, err
} }
return req, nil return batch, nil
} }
func ParseRPCRes(r io.Reader) (*RPCRes, error) { func ParseRPCRes(r io.Reader) (*RPCRes, error) {
...@@ -86,6 +108,22 @@ func ParseRPCRes(r io.Reader) (*RPCRes, error) { ...@@ -86,6 +108,22 @@ func ParseRPCRes(r io.Reader) (*RPCRes, error) {
return res, nil return res, nil
} }
func ValidateRPCReq(req *RPCReq) error {
if req.JSONRPC != JSONRPCVersion {
return ErrInvalidRequest("invalid JSON-RPC version")
}
if req.Method == "" {
return ErrInvalidRequest("no method specified")
}
if !IsValidID(req.ID) {
return ErrInvalidRequest("invalid ID")
}
return nil
}
func NewRPCErrorRes(id json.RawMessage, err error) *RPCRes { func NewRPCErrorRes(id json.RawMessage, err error) *RPCRes {
var rpcErr *RPCErr var rpcErr *RPCErr
if rr, ok := err.(*RPCErr); ok { if rr, ok := err.(*RPCErr); ok {
...@@ -103,3 +141,14 @@ func NewRPCErrorRes(id json.RawMessage, err error) *RPCRes { ...@@ -103,3 +141,14 @@ func NewRPCErrorRes(id json.RawMessage, err error) *RPCRes {
ID: id, ID: id,
} }
} }
func IsBatch(raw []byte) bool {
for _, c := range raw {
// skip insignificant whitespace (http://www.ietf.org/rfc/rfc4627.txt)
if c == 0x20 || c == 0x09 || c == 0x0a || c == 0x0d {
continue
}
return c == '['
}
return false
}
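IsBatch only has to look at the first significant byte: per RFC 4627, anything other than space, tab, CR, or LF starts the JSON value, and '[' means the body is an array of calls to be split with ParseBatchRPCReq. A small standalone sketch of that routing decision (names are illustrative, not proxyd's API):

// Route a request body by its first significant byte: '[' means a JSON-RPC
// batch, anything else is treated as a single request.
package main

import (
    "encoding/json"
    "fmt"
)

func isBatch(raw []byte) bool {
    for _, c := range raw {
        // Skip insignificant whitespace per RFC 4627.
        if c == 0x20 || c == 0x09 || c == 0x0a || c == 0x0d {
            continue
        }
        return c == '['
    }
    return false
}

func main() {
    single := []byte(`{"jsonrpc":"2.0","method":"eth_chainId","id":1}`)
    batch := []byte(` [{"jsonrpc":"2.0","method":"eth_chainId","id":1},{"jsonrpc":"2.0","method":"eth_blockNumber","id":2}]`)

    for _, body := range [][]byte{single, batch} {
        if isBatch(body) {
            var calls []json.RawMessage
            if err := json.Unmarshal(body, &calls); err != nil {
                panic(err)
            }
            fmt.Printf("batch with %d calls\n", len(calls))
            continue
        }
        fmt.Println("single call")
    }
}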
package proxyd
import (
"encoding/json"
"github.com/stretchr/testify/require"
"testing"
)
func TestRPCResJSON(t *testing.T) {
tests := []struct {
name string
in *RPCRes
out string
}{
{
"string result",
&RPCRes{
JSONRPC: JSONRPCVersion,
Result: "foobar",
ID: []byte("123"),
},
`{"jsonrpc":"2.0","result":"foobar","id":123}`,
},
{
"object result",
&RPCRes{
JSONRPC: JSONRPCVersion,
Result: struct {
Str string `json:"str"`
}{
"test",
},
ID: []byte("123"),
},
`{"jsonrpc":"2.0","result":{"str":"test"},"id":123}`,
},
{
"nil result",
&RPCRes{
JSONRPC: JSONRPCVersion,
Result: nil,
ID: []byte("123"),
},
`{"jsonrpc":"2.0","result":null,"id":123}`,
},
{
"error result",
&RPCRes{
JSONRPC: JSONRPCVersion,
Error: &RPCErr{
Code: 1234,
Message: "test err",
},
ID: []byte("123"),
},
`{"jsonrpc":"2.0","error":{"code":1234,"message":"test err"},"id":123}`,
},
{
"string ID",
&RPCRes{
JSONRPC: JSONRPCVersion,
Result: "foobar",
ID: []byte("\"123\""),
},
`{"jsonrpc":"2.0","result":"foobar","id":"123"}`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
out, err := json.Marshal(tt.in)
require.NoError(t, err)
require.Equal(t, tt.out, string(out))
})
}
}
...@@ -6,6 +6,8 @@ import ( ...@@ -6,6 +6,8 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"io/ioutil"
"math"
"net/http" "net/http"
"strconv" "strconv"
"strings" "strings"
...@@ -22,6 +24,7 @@ const ( ...@@ -22,6 +24,7 @@ const (
ContextKeyAuth = "authorization" ContextKeyAuth = "authorization"
ContextKeyReqID = "req_id" ContextKeyReqID = "req_id"
ContextKeyXForwardedFor = "x_forwarded_for" ContextKeyXForwardedFor = "x_forwarded_for"
MaxBatchRPCCalls = 100
) )
type Server struct { type Server struct {
...@@ -49,6 +52,11 @@ func NewServer( ...@@ -49,6 +52,11 @@ func NewServer(
if cache == nil { if cache == nil {
cache = &NoopRPCCache{} cache = &NoopRPCCache{}
} }
if maxBodySize == 0 {
maxBodySize = math.MaxInt64
}
return &Server{ return &Server{
backendGroups: backendGroups, backendGroups: backendGroups,
wsBackendGroup: wsBackendGroup, wsBackendGroup: wsBackendGroup,
...@@ -122,15 +130,66 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) { ...@@ -122,15 +130,66 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) {
"user_agent", r.Header.Get("user-agent"), "user_agent", r.Header.Get("user-agent"),
) )
bodyReader := &recordLenReader{Reader: io.LimitReader(r.Body, s.maxBodySize)} body, err := ioutil.ReadAll(io.LimitReader(r.Body, s.maxBodySize))
req, err := ParseRPCReq(bodyReader)
if err != nil { if err != nil {
log.Info("rejected request with bad rpc request", "source", "rpc", "err", err) log.Error("error reading request body", "err", err)
RecordRPCError(ctx, BackendProxyd, MethodUnknown, err) writeRPCError(ctx, w, nil, ErrInternal)
return
}
RecordRequestPayloadSize(ctx, len(body))
if IsBatch(body) {
reqs, err := ParseBatchRPCReq(body)
if err != nil {
log.Error("error parsing batch RPC request", "err", err)
RecordRPCError(ctx, BackendProxyd, MethodUnknown, err)
writeRPCError(ctx, w, nil, ErrParseErr)
return
}
if len(reqs) > MaxBatchRPCCalls {
RecordRPCError(ctx, BackendProxyd, MethodUnknown, ErrTooManyBatchRequests)
writeRPCError(ctx, w, nil, ErrTooManyBatchRequests)
return
}
if len(reqs) == 0 {
writeRPCError(ctx, w, nil, ErrInvalidRequest("must specify at least one batch call"))
return
}
batchRes := make([]*RPCRes, len(reqs), len(reqs))
for i := 0; i < len(reqs); i++ {
req, err := ParseRPCReq(reqs[i])
if err != nil {
log.Info("error parsing RPC call", "source", "rpc", "err", err)
batchRes[i] = NewRPCErrorRes(nil, err)
continue
}
batchRes[i] = s.handleSingleRPC(ctx, req)
}
writeBatchRPCRes(ctx, w, batchRes)
return
}
req, err := ParseRPCReq(body)
if err != nil {
log.Info("error parsing RPC call", "source", "rpc", "err", err)
writeRPCError(ctx, w, nil, err) writeRPCError(ctx, w, nil, err)
return return
} }
RecordRequestPayloadSize(ctx, req.Method, bodyReader.Len)
backendRes := s.handleSingleRPC(ctx, req)
writeRPCRes(ctx, w, backendRes)
}
func (s *Server) handleSingleRPC(ctx context.Context, req *RPCReq) *RPCRes {
if err := ValidateRPCReq(req); err != nil {
RecordRPCError(ctx, BackendProxyd, MethodUnknown, err)
return NewRPCErrorRes(nil, err)
}
group := s.rpcMethodMappings[req.Method] group := s.rpcMethodMappings[req.Method]
if group == "" { if group == "" {
...@@ -143,16 +202,11 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) { ...@@ -143,16 +202,11 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) {
"method", req.Method, "method", req.Method,
) )
RecordRPCError(ctx, BackendProxyd, MethodUnknown, ErrMethodNotWhitelisted) RecordRPCError(ctx, BackendProxyd, MethodUnknown, ErrMethodNotWhitelisted)
writeRPCError(ctx, w, req.ID, ErrMethodNotWhitelisted) return NewRPCErrorRes(req.ID, ErrMethodNotWhitelisted)
return
} }
var backendRes *RPCRes var backendRes *RPCRes
backendRes, err = s.cache.GetRPC(ctx, req) backendRes, err := s.cache.GetRPC(ctx, req)
if err == nil && backendRes != nil {
writeRPCRes(ctx, w, backendRes)
return
}
if err != nil { if err != nil {
log.Warn( log.Warn(
"cache lookup error", "cache lookup error",
...@@ -160,6 +214,9 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) { ...@@ -160,6 +214,9 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) {
"err", err, "err", err,
) )
} }
if backendRes != nil {
return backendRes
}
backendRes, err = s.backendGroups[group].Forward(ctx, req) backendRes, err = s.backendGroups[group].Forward(ctx, req)
if err != nil { if err != nil {
...@@ -169,8 +226,7 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) { ...@@ -169,8 +226,7 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) {
"req_id", GetReqID(ctx), "req_id", GetReqID(ctx),
"err", err, "err", err,
) )
writeRPCError(ctx, w, req.ID, err) return NewRPCErrorRes(req.ID, err)
return
} }
if backendRes.Error == nil { if backendRes.Error == nil {
...@@ -183,7 +239,7 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) { ...@@ -183,7 +239,7 @@ func (s *Server) HandleRPC(w http.ResponseWriter, r *http.Request) {
} }
} }
writeRPCRes(ctx, w, backendRes) return backendRes
} }
func (s *Server) HandleWS(w http.ResponseWriter, r *http.Request) { func (s *Server) HandleWS(w http.ResponseWriter, r *http.Request) {
...@@ -282,6 +338,7 @@ func writeRPCRes(ctx context.Context, w http.ResponseWriter, res *RPCRes) { ...@@ -282,6 +338,7 @@ func writeRPCRes(ctx context.Context, w http.ResponseWriter, res *RPCRes) {
statusCode = res.Error.HTTPErrorCode statusCode = res.Error.HTTPErrorCode
} }
w.Header().Set("content-type", "application/json")
w.WriteHeader(statusCode) w.WriteHeader(statusCode)
ww := &recordLenWriter{Writer: w} ww := &recordLenWriter{Writer: w}
enc := json.NewEncoder(ww) enc := json.NewEncoder(ww)
...@@ -294,6 +351,19 @@ func writeRPCRes(ctx context.Context, w http.ResponseWriter, res *RPCRes) { ...@@ -294,6 +351,19 @@ func writeRPCRes(ctx context.Context, w http.ResponseWriter, res *RPCRes) {
RecordResponsePayloadSize(ctx, ww.Len) RecordResponsePayloadSize(ctx, ww.Len)
} }
func writeBatchRPCRes(ctx context.Context, w http.ResponseWriter, res []*RPCRes) {
w.Header().Set("content-type", "application/json")
w.WriteHeader(200)
ww := &recordLenWriter{Writer: w}
enc := json.NewEncoder(ww)
if err := enc.Encode(res); err != nil {
log.Error("error writing batch rpc response", "err", err)
RecordRPCError(ctx, BackendProxyd, MethodUnknown, err)
return
}
RecordResponsePayloadSize(ctx, ww.Len)
}
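Putting the server-side pieces together: a batch request is a JSON array of call objects, each call is validated and handled by handleSingleRPC, and the responses are written back as one array by writeBatchRPCRes, capped at MaxBatchRPCCalls entries. A hedged client-side sketch of what such a round trip might look like (the endpoint URL is a placeholder, not a documented address):

// Client-side view of a batched call against a proxyd-style endpoint. The
// server answers with a JSON array containing one response object per call and
// rejects batches larger than MaxBatchRPCCalls (100 above).
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

type rpcCall struct {
    JSONRPC string        `json:"jsonrpc"`
    Method  string        `json:"method"`
    Params  []interface{} `json:"params"`
    ID      int           `json:"id"`
}

func main() {
    batch := []rpcCall{
        {JSONRPC: "2.0", Method: "eth_blockNumber", Params: []interface{}{}, ID: 1},
        {JSONRPC: "2.0", Method: "eth_gasPrice", Params: []interface{}{}, ID: 2},
    }
    body, _ := json.Marshal(batch)

    // http://localhost:8080 is an assumed local proxyd RPC listener.
    resp, err := http.Post("http://localhost:8080", "application/json", bytes.NewReader(body))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var results []map[string]interface{}
    if err := json.NewDecoder(resp.Body).Decode(&results); err != nil {
        panic(err)
    }
    for _, r := range results {
        fmt.Printf("id=%v result=%v error=%v\n", r["id"], r["result"], r["error"])
    }
}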
func instrumentedHdlr(h http.Handler) http.HandlerFunc { func instrumentedHdlr(h http.Handler) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) {
respTimer := prometheus.NewTimer(httpRequestDurationSumm) respTimer := prometheus.NewTimer(httpRequestDurationSumm)
......
...@@ -4,3 +4,14 @@ L1_URL= ...@@ -4,3 +4,14 @@ L1_URL=
L2_URL= L2_URL=
ADDRESS_MANAGER= ADDRESS_MANAGER=
L2_CHAINID= L2_CHAINID=
DTL_ENQUEUE_CONFIRMATIONS=
OVMCONTEXT_SPEC_NUM_TXS=1
# Can be set to true below if the withdrawal window is short enough
RUN_WITHDRAWAL_TESTS=false
RUN_DEBUG_TRACE_TESTS=false
RUN_REPLICA_TESTS=false
RUN_STRESS_TESTS=false
# Can be configured up or down as necessary
MOCHA_TIMEOUT=300000
# Set to true to make Mocha stop after the first failed test.
MOCHA_BAIL=false
\ No newline at end of file
import { utils, Wallet, BigNumber } from 'ethers' import { utils, Wallet, BigNumber } from 'ethers'
import { expect } from 'chai'
import { setupActor, setupRun, actor, run } from './lib/convenience' import { setupActor, setupRun, actor, run } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env' import { OptimismEnv } from '../test/shared/env'
import { Direction } from '../test/shared/watcher-utils' import { Direction } from '../test/shared/watcher-utils'
import { expect } from 'chai'
interface BenchContext { interface BenchContext {
l1Wallet: Wallet l1Wallet: Wallet
......
import { performance } from 'perf_hooks'
import { Mutex } from 'async-mutex' import { Mutex } from 'async-mutex'
import { sleep } from '../../test/shared/utils'
import { import {
sanitizeForMetrics, sanitizeForMetrics,
benchDurationsSummary, benchDurationsSummary,
...@@ -9,7 +11,7 @@ import { ...@@ -9,7 +11,7 @@ import {
failedBenchRunsTotal, failedBenchRunsTotal,
} from './metrics' } from './metrics'
import { ActorLogger, WorkerLogger } from './logger' import { ActorLogger, WorkerLogger } from './logger'
import { performance } from 'perf_hooks' import { sleep } from '../../test/shared/utils'
// eslint-disable-next-line @typescript-eslint/no-empty-function // eslint-disable-next-line @typescript-eslint/no-empty-function
const asyncNoop = async () => {} const asyncNoop = async () => {}
......
import fs from 'fs' import fs from 'fs'
import client from 'prom-client'
import http from 'http' import http from 'http'
import url from 'url' import url from 'url'
import client from 'prom-client'
export const metricsRegistry = new client.Registry() export const metricsRegistry = new client.Registry()
const metricName = (name: string) => { const metricName = (name: string) => {
......
import * as path from 'path' import * as path from 'path'
import { Command } from 'commander'
import { defaultRuntime } from './convenience' import { defaultRuntime } from './convenience'
import { RunOpts } from './actor' import { RunOpts } from './actor'
import { Command } from 'commander'
import pkg from '../../package.json'
import { serveMetrics } from './metrics' import { serveMetrics } from './metrics'
import pkg from '../../package.json'
const program = new Command() const program = new Command()
program.version(pkg.version) program.version(pkg.version)
......
import { utils, Wallet, Contract } from 'ethers' import { utils, Wallet, Contract } from 'ethers'
import { expect } from 'chai'
import { actor, run, setupActor, setupRun } from './lib/convenience' import { actor, run, setupActor, setupRun } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env' import { OptimismEnv } from '../test/shared/env'
import ERC721 from '../artifacts/contracts/NFT.sol/NFT.json' import ERC721 from '../artifacts/contracts/NFT.sol/NFT.json'
import { expect } from 'chai'
interface Context { interface Context {
wallet: Wallet wallet: Wallet
......
import { utils, Wallet, BigNumber } from 'ethers' import { utils, Wallet, BigNumber } from 'ethers'
import { expect } from 'chai' import { expect } from 'chai'
import { actor, setupRun, setupActor, run } from './lib/convenience' import { actor, setupRun, setupActor, run } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env' import { OptimismEnv } from '../test/shared/env'
......
import { utils, Wallet, Contract, ContractFactory } from 'ethers' import { utils, Wallet, Contract } from 'ethers'
import { ethers } from 'hardhat'
import { expect } from 'chai'
import { actor, setupActor, run, setupRun } from './lib/convenience' import { actor, setupActor, run, setupRun } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env' import { OptimismEnv } from '../test/shared/env'
import StateDOS from '../artifacts/contracts/StateDOS.sol/StateDOS.json'
import { expect } from 'chai'
interface Context { interface Context {
wallet: Wallet wallet: Wallet
...@@ -16,11 +17,7 @@ actor('Trie DoS accounts', () => { ...@@ -16,11 +17,7 @@ actor('Trie DoS accounts', () => {
setupActor(async () => { setupActor(async () => {
env = await OptimismEnv.new() env = await OptimismEnv.new()
const factory = new ContractFactory( const factory = await ethers.getContractFactory('StateDOS', env.l2Wallet)
StateDOS.abi,
StateDOS.bytecode,
env.l2Wallet
)
contract = await factory.deploy() contract = await factory.deploy()
await contract.deployed() await contract.deployed()
}) })
......
import { Contract, utils, Wallet } from 'ethers' import { Contract, utils, Wallet } from 'ethers'
import { actor, run, setupActor, setupRun } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env'
import { FeeAmount } from '@uniswap/v3-sdk' import { FeeAmount } from '@uniswap/v3-sdk'
import ERC20 from '../artifacts/contracts/ERC20.sol/ERC20.json'
import { abi as NFTABI } from '@uniswap/v3-periphery/artifacts/contracts/NonfungiblePositionManager.sol/NonfungiblePositionManager.json' import { abi as NFTABI } from '@uniswap/v3-periphery/artifacts/contracts/NonfungiblePositionManager.sol/NonfungiblePositionManager.json'
import { abi as RouterABI } from '@uniswap/v3-periphery/artifacts/contracts/SwapRouter.sol/SwapRouter.json' import { abi as RouterABI } from '@uniswap/v3-periphery/artifacts/contracts/SwapRouter.sol/SwapRouter.json'
import { actor, run, setupActor, setupRun } from './lib/convenience'
import { OptimismEnv } from '../test/shared/env'
import ERC20 from '../artifacts/contracts/ERC20.sol/ERC20.json'
interface Context { interface Context {
contracts: { [name: string]: Contract } contracts: { [name: string]: Contract }
wallet: Wallet wallet: Wallet
......
// SPDX-License-Identifier: MIT
pragma solidity >=0.5.0;
pragma experimental ABIEncoderV2;
// https://github.com/makerdao/multicall/blob/master/src/Multicall.sol
/// @title Multicall - Aggregate results from multiple read-only function calls
/// @author Michael Elliot <mike@makerdao.com>
/// @author Joshua Levine <joshua@makerdao.com>
/// @author Nick Johnson <arachnid@notdot.net>
contract Multicall {
struct Call {
address target;
bytes callData;
}
function aggregate(Call[] memory calls) public returns (uint256 blockNumber, bytes[] memory returnData) {
blockNumber = block.number;
returnData = new bytes[](calls.length);
for (uint256 i = 0; i < calls.length; i++) {
(bool success, bytes memory ret) = calls[i].target.call(calls[i].callData);
require(success);
returnData[i] = ret;
}
}
// Helper functions
function getEthBalance(address addr) public view returns (uint256 balance) {
balance = addr.balance;
}
function getBlockHash(uint256 blockNumber) public view returns (bytes32 blockHash) {
blockHash = blockhash(blockNumber);
}
function getLastBlockHash() public view returns (bytes32 blockHash) {
blockHash = blockhash(block.number - 1);
}
function getCurrentBlockTimestamp() public view returns (uint256 timestamp) {
timestamp = block.timestamp;
}
function getCurrentBlockDifficulty() public view returns (uint256 difficulty) {
difficulty = block.difficulty;
}
function getCurrentBlockGasLimit() public view returns (uint256 gaslimit) {
gaslimit = block.gaslimit;
}
function getCurrentBlockCoinbase() public view returns (address coinbase) {
coinbase = block.coinbase;
}
}
...@@ -22,26 +22,13 @@ pragma solidity ^0.8.9; ...@@ -22,26 +22,13 @@ pragma solidity ^0.8.9;
// Can't do this until the package is published. // Can't do this until the package is published.
//import { iOVM_L1BlockNumber } from "@eth-optimism/contracts/iOVM_L1BlockNumber"; //import { iOVM_L1BlockNumber } from "@eth-optimism/contracts/iOVM_L1BlockNumber";
import { iOVM_L1BlockNumber } from "./OVMContextStorage.sol";
/// @title OVMMulticall - Aggregate results from multiple read-only function calls interface iOVM_L1BlockNumber {
contract OVMMulticall { function getL1BlockNumber() external view returns (uint256);
struct Call { }
address target;
bytes callData;
}
function aggregate(Call[] memory calls) public returns (uint256 blockNumber, bytes[] memory returnData) {
blockNumber = block.number;
returnData = new bytes[](calls.length);
for (uint256 i = 0; i < calls.length; i++) {
(bool success, bytes memory ret) = calls[i].target.call(calls[i].callData);
require(success);
returnData[i] = ret;
}
}
// Helper functions /// @title OVMContext - Helper Functions
contract OVMContext {
function getCurrentBlockTimestamp() public view returns (uint256 timestamp) { function getCurrentBlockTimestamp() public view returns (uint256 timestamp) {
timestamp = block.timestamp; timestamp = block.timestamp;
} }
......
// SPDX-License-Identifier: MIT // SPDX-License-Identifier: MIT
pragma solidity ^0.8.9; pragma solidity ^0.8.9;
// Can't do this until the package is published. import {OVMContext} from "./OVMContext.sol";
//import { iOVM_L1BlockNumber } from "@eth-optimism/contracts/iOVM_L1BlockNumber";
interface iOVM_L1BlockNumber {
function getL1BlockNumber() external view returns (uint256);
}
contract OVMContextStorage { contract OVMContextStorage is OVMContext {
mapping (uint256 => uint256) public l1BlockNumbers; mapping(uint256 => uint256) public l1BlockNumbers;
mapping (uint256 => uint256) public blockNumbers; mapping(uint256 => uint256) public blockNumbers;
mapping (uint256 => uint256) public timestamps; mapping(uint256 => uint256) public timestamps;
mapping (uint256 => uint256) public difficulty; mapping(uint256 => uint256) public difficulty;
mapping (uint256 => address) public coinbases; mapping(uint256 => address) public coinbases;
uint256 public index = 0; uint256 public index = 0;
fallback() external { fallback() external {
l1BlockNumbers[index] = iOVM_L1BlockNumber( l1BlockNumbers[index] = getCurrentL1BlockNumber();
0x4200000000000000000000000000000000000013 blockNumbers[index] = getCurrentBlockNumber();
).getL1BlockNumber(); timestamps[index] = getCurrentBlockTimestamp();
blockNumbers[index] = block.number;
timestamps[index] = block.timestamp;
difficulty[index] = block.difficulty; difficulty[index] = block.difficulty;
coinbases[index] = block.coinbase; coinbases[index] = block.coinbase;
index++; index++;
......
...@@ -4,7 +4,7 @@ import { HardhatUserConfig } from 'hardhat/types' ...@@ -4,7 +4,7 @@ import { HardhatUserConfig } from 'hardhat/types'
import '@nomiclabs/hardhat-ethers' import '@nomiclabs/hardhat-ethers'
import '@nomiclabs/hardhat-waffle' import '@nomiclabs/hardhat-waffle'
import 'hardhat-gas-reporter' import 'hardhat-gas-reporter'
import { isLiveNetwork } from './test/shared/utils' import { envConfig } from './test/shared/utils'
const enableGasReport = !!process.env.ENABLE_GAS_REPORT const enableGasReport = !!process.env.ENABLE_GAS_REPORT
...@@ -15,7 +15,8 @@ const config: HardhatUserConfig = { ...@@ -15,7 +15,8 @@ const config: HardhatUserConfig = {
}, },
}, },
mocha: { mocha: {
timeout: isLiveNetwork() ? 300_000 : 75_000, timeout: envConfig.MOCHA_TIMEOUT,
bail: envConfig.MOCHA_BAIL,
}, },
solidity: { solidity: {
compilers: [ compilers: [
......
...@@ -41,7 +41,6 @@ ...@@ -41,7 +41,6 @@
"@types/chai-as-promised": "^7.1.4", "@types/chai-as-promised": "^7.1.4",
"@types/mocha": "^8.2.2", "@types/mocha": "^8.2.2",
"@types/rimraf": "^3.0.0", "@types/rimraf": "^3.0.0",
"@types/shelljs": "^0.8.8",
"@typescript-eslint/eslint-plugin": "^4.26.0", "@typescript-eslint/eslint-plugin": "^4.26.0",
"@typescript-eslint/parser": "^4.26.0", "@typescript-eslint/parser": "^4.26.0",
"@uniswap/v3-core": "1.0.0", "@uniswap/v3-core": "1.0.0",
...@@ -52,7 +51,6 @@ ...@@ -52,7 +51,6 @@
"chai": "^4.3.4", "chai": "^4.3.4",
"chai-as-promised": "^7.1.1", "chai-as-promised": "^7.1.1",
"commander": "^8.3.0", "commander": "^8.3.0",
"docker-compose": "^0.23.8",
"dotenv": "^10.0.0", "dotenv": "^10.0.0",
"envalid": "^7.1.0", "envalid": "^7.1.0",
"eslint": "^7.27.0", "eslint": "^7.27.0",
...@@ -71,7 +69,6 @@ ...@@ -71,7 +69,6 @@
"mocha": "^8.4.0", "mocha": "^8.4.0",
"prom-client": "^14.0.1", "prom-client": "^14.0.1",
"rimraf": "^3.0.2", "rimraf": "^3.0.2",
"shelljs": "^0.8.4",
"typescript": "^4.3.5", "typescript": "^4.3.5",
"uniswap-v3-deploy-plugin": "^0.1.0" "uniswap-v3-deploy-plugin": "^0.1.0"
} }
......
import { expect } from './shared/setup'
/* Imports: External */ /* Imports: External */
import { Contract, ContractFactory } from 'ethers' import { Contract, ContractFactory } from 'ethers'
import { ethers } from 'hardhat'
import { applyL1ToL2Alias, awaitCondition } from '@eth-optimism/core-utils' import { applyL1ToL2Alias, awaitCondition } from '@eth-optimism/core-utils'
/* Imports: Internal */ /* Imports: Internal */
import simpleStorageJson from '../artifacts/contracts/SimpleStorage.sol/SimpleStorage.json' import { expect } from './shared/setup'
import l2ReverterJson from '../artifacts/contracts/Reverter.sol/Reverter.json'
import { Direction } from './shared/watcher-utils' import { Direction } from './shared/watcher-utils'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { isMainnet } from './shared/utils' import {
DEFAULT_TEST_GAS_L1,
DEFAULT_TEST_GAS_L2,
envConfig,
sleep,
withdrawalTest,
} from './shared/utils'
describe('Basic L1<>L2 Communication', async () => { describe('Basic L1<>L2 Communication', async () => {
let Factory__L1SimpleStorage: ContractFactory let Factory__L1SimpleStorage: ContractFactory
...@@ -22,61 +26,58 @@ describe('Basic L1<>L2 Communication', async () => { ...@@ -22,61 +26,58 @@ describe('Basic L1<>L2 Communication', async () => {
before(async () => { before(async () => {
env = await OptimismEnv.new() env = await OptimismEnv.new()
Factory__L1SimpleStorage = new ContractFactory( Factory__L1SimpleStorage = await ethers.getContractFactory(
simpleStorageJson.abi, 'SimpleStorage',
simpleStorageJson.bytecode,
env.l1Wallet env.l1Wallet
) )
Factory__L2SimpleStorage = new ContractFactory( Factory__L2SimpleStorage = await ethers.getContractFactory(
simpleStorageJson.abi, 'SimpleStorage',
simpleStorageJson.bytecode,
env.l2Wallet env.l2Wallet
) )
Factory__L2Reverter = new ContractFactory( Factory__L2Reverter = await ethers.getContractFactory(
l2ReverterJson.abi, 'Reverter',
l2ReverterJson.bytecode,
env.l2Wallet env.l2Wallet
) )
}) })
beforeEach(async () => { beforeEach(async () => {
L1SimpleStorage = await Factory__L1SimpleStorage.deploy() L1SimpleStorage = await Factory__L1SimpleStorage.deploy()
await L1SimpleStorage.deployTransaction.wait() await L1SimpleStorage.deployed()
L2SimpleStorage = await Factory__L2SimpleStorage.deploy() L2SimpleStorage = await Factory__L2SimpleStorage.deploy()
await L2SimpleStorage.deployTransaction.wait() await L2SimpleStorage.deployed()
L2Reverter = await Factory__L2Reverter.deploy() L2Reverter = await Factory__L2Reverter.deploy()
await L2Reverter.deployTransaction.wait() await L2Reverter.deployed()
}) })
describe('L2 => L1', () => { describe('L2 => L1', () => {
it('should be able to perform a withdrawal from L2 -> L1', async function () { withdrawalTest(
if (await isMainnet(env)) { 'should be able to perform a withdrawal from L2 -> L1',
console.log('Skipping withdrawals test on mainnet.') async () => {
this.skip() const value = `0x${'77'.repeat(32)}`
return
} // Send L2 -> L1 message.
const transaction = await env.l2Messenger.sendMessage(
const value = `0x${'77'.repeat(32)}` L1SimpleStorage.address,
L1SimpleStorage.interface.encodeFunctionData('setValue', [value]),
// Send L2 -> L1 message. 5000000,
const transaction = await env.l2Messenger.sendMessage( {
L1SimpleStorage.address, gasLimit: DEFAULT_TEST_GAS_L2,
L1SimpleStorage.interface.encodeFunctionData('setValue', [value]), }
5000000 )
) await transaction.wait()
await transaction.wait() await env.relayXDomainMessages(transaction)
await env.relayXDomainMessages(transaction) await env.waitForXDomainTransaction(transaction, Direction.L2ToL1)
await env.waitForXDomainTransaction(transaction, Direction.L2ToL1)
expect(await L1SimpleStorage.msgSender()).to.equal( expect(await L1SimpleStorage.msgSender()).to.equal(
env.l1Messenger.address env.l1Messenger.address
) )
expect(await L1SimpleStorage.xDomainSender()).to.equal( expect(await L1SimpleStorage.xDomainSender()).to.equal(
env.l2Wallet.address env.l2Wallet.address
) )
expect(await L1SimpleStorage.value()).to.equal(value) expect(await L1SimpleStorage.value()).to.equal(value)
expect((await L1SimpleStorage.totalCount()).toNumber()).to.equal(1) expect((await L1SimpleStorage.totalCount()).toNumber()).to.equal(1)
}) }
)
}) })
describe('L1 => L2', () => { describe('L1 => L2', () => {
...@@ -87,7 +88,10 @@ describe('Basic L1<>L2 Communication', async () => { ...@@ -87,7 +88,10 @@ describe('Basic L1<>L2 Communication', async () => {
const transaction = await env.l1Messenger.sendMessage( const transaction = await env.l1Messenger.sendMessage(
L2SimpleStorage.address, L2SimpleStorage.address,
L2SimpleStorage.interface.encodeFunctionData('setValue', [value]), L2SimpleStorage.interface.encodeFunctionData('setValue', [value]),
5000000 5000000,
{
gasLimit: DEFAULT_TEST_GAS_L1,
}
) )
await env.waitForXDomainTransaction(transaction, Direction.L1ToL2) await env.waitForXDomainTransaction(transaction, Direction.L1ToL2)
...@@ -105,19 +109,41 @@ describe('Basic L1<>L2 Communication', async () => { ...@@ -105,19 +109,41 @@ describe('Basic L1<>L2 Communication', async () => {
expect((await L2SimpleStorage.totalCount()).toNumber()).to.equal(1) expect((await L2SimpleStorage.totalCount()).toNumber()).to.equal(1)
}) })
it('should deposit from L1 -> L2 directly via enqueue', async () => { it('should deposit from L1 -> L2 directly via enqueue', async function () {
this.timeout(
envConfig.MOCHA_TIMEOUT * 2 +
envConfig.DTL_ENQUEUE_CONFIRMATIONS * 15000
)
const value = `0x${'42'.repeat(32)}` const value = `0x${'42'.repeat(32)}`
// Send L1 -> L2 message. // Send L1 -> L2 message.
await env.ctc const tx = await env.ctc
.connect(env.l1Wallet) .connect(env.l1Wallet)
.enqueue( .enqueue(
L2SimpleStorage.address, L2SimpleStorage.address,
5000000, 5000000,
L2SimpleStorage.interface.encodeFunctionData('setValueNotXDomain', [ L2SimpleStorage.interface.encodeFunctionData('setValueNotXDomain', [
value, value,
]) ]),
{
gasLimit: DEFAULT_TEST_GAS_L1,
}
)
const receipt = await tx.wait()
const waitUntilBlock =
receipt.blockNumber + envConfig.DTL_ENQUEUE_CONFIRMATIONS
let currBlock = await env.l1Provider.getBlockNumber()
while (currBlock <= waitUntilBlock) {
const progress =
envConfig.DTL_ENQUEUE_CONFIRMATIONS - (waitUntilBlock - currBlock)
console.log(
`Waiting for ${progress}/${envConfig.DTL_ENQUEUE_CONFIRMATIONS} confirmations.`
) )
await sleep(5000)
currBlock = await env.l1Provider.getBlockNumber()
}
console.log('Enqueue should be confirmed.')
await awaitCondition( await awaitCondition(
async () => { async () => {
...@@ -142,8 +168,12 @@ describe('Basic L1<>L2 Communication', async () => { ...@@ -142,8 +168,12 @@ describe('Basic L1<>L2 Communication', async () => {
const transaction = await env.l1Messenger.sendMessage( const transaction = await env.l1Messenger.sendMessage(
L2SimpleStorage.address, L2SimpleStorage.address,
L2SimpleStorage.interface.encodeFunctionData('setValue', [value]), L2SimpleStorage.interface.encodeFunctionData('setValue', [value]),
5000000 5000000,
{
gasLimit: DEFAULT_TEST_GAS_L1,
}
) )
await transaction.wait()
const { remoteReceipt } = await env.waitForXDomainTransaction( const { remoteReceipt } = await env.waitForXDomainTransaction(
transaction, transaction,
...@@ -159,7 +189,10 @@ describe('Basic L1<>L2 Communication', async () => { ...@@ -159,7 +189,10 @@ describe('Basic L1<>L2 Communication', async () => {
const transaction = await env.l1Messenger.sendMessage( const transaction = await env.l1Messenger.sendMessage(
L2Reverter.address, L2Reverter.address,
L2Reverter.interface.encodeFunctionData('doRevert', []), L2Reverter.interface.encodeFunctionData('doRevert', []),
5000000 5000000,
{
gasLimit: DEFAULT_TEST_GAS_L1,
}
) )
const { remoteReceipt } = await env.waitForXDomainTransaction( const { remoteReceipt } = await env.waitForXDomainTransaction(
......
import { expect } from './shared/setup'
import { BigNumber, Contract, ContractFactory, utils, Wallet } from 'ethers' import { BigNumber, Contract, ContractFactory, utils, Wallet } from 'ethers'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import * as L2Artifact from '@eth-optimism/contracts/artifacts/contracts/standards/L2StandardERC20.sol/L2StandardERC20.json' import * as L2Artifact from '@eth-optimism/contracts/artifacts/contracts/standards/L2StandardERC20.sol/L2StandardERC20.json'
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { isLiveNetwork, isMainnet } from './shared/utils' import { withdrawalTest } from './shared/utils'
import { Direction } from './shared/watcher-utils' import { Direction } from './shared/watcher-utils'
describe('Bridged tokens', () => { describe('Bridged tokens', () => {
...@@ -25,14 +24,16 @@ describe('Bridged tokens', () => { ...@@ -25,14 +24,16 @@ describe('Bridged tokens', () => {
const other = Wallet.createRandom() const other = Wallet.createRandom()
otherWalletL1 = other.connect(env.l1Wallet.provider) otherWalletL1 = other.connect(env.l1Wallet.provider)
otherWalletL2 = other.connect(env.l2Wallet.provider) otherWalletL2 = other.connect(env.l2Wallet.provider)
await env.l1Wallet.sendTransaction({ let tx = await env.l1Wallet.sendTransaction({
to: otherWalletL1.address, to: otherWalletL1.address,
value: utils.parseEther('0.01'), value: utils.parseEther('0.01'),
}) })
await env.l2Wallet.sendTransaction({ await tx.wait()
tx = await env.l2Wallet.sendTransaction({
to: otherWalletL2.address, to: otherWalletL2.address,
value: utils.parseEther('0.01'), value: utils.parseEther('0.01'),
}) })
await tx.wait()
L1Factory__ERC20 = await ethers.getContractFactory('ERC20', env.l1Wallet) L1Factory__ERC20 = await ethers.getContractFactory('ERC20', env.l1Wallet)
L2Factory__ERC20 = new ethers.ContractFactory( L2Factory__ERC20 = new ethers.ContractFactory(
...@@ -77,7 +78,7 @@ describe('Bridged tokens', () => { ...@@ -77,7 +78,7 @@ describe('Bridged tokens', () => {
expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal( expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal(
BigNumber.from(1000) BigNumber.from(1000)
) )
}).timeout(isLiveNetwork() ? 300_000 : 120_000) })
it('should transfer tokens on L2', async () => { it('should transfer tokens on L2', async () => {
const tx = await L2__ERC20.transfer(otherWalletL1.address, 500) const tx = await L2__ERC20.transfer(otherWalletL1.address, 500)
...@@ -90,46 +91,40 @@ describe('Bridged tokens', () => { ...@@ -90,46 +91,40 @@ describe('Bridged tokens', () => {
) )
}) })
it('should withdraw tokens from L2 to the depositor', async function () { withdrawalTest(
if (await isMainnet(env)) { 'should withdraw tokens from L2 to the depositor',
console.log('Skipping withdrawals test on mainnet.') async () => {
this.skip() const tx = await env.l2Bridge.withdraw(
return L2__ERC20.address,
500,
2000000,
'0x'
)
await env.relayXDomainMessages(tx)
await env.waitForXDomainTransaction(tx, Direction.L2ToL1)
expect(await L1__ERC20.balanceOf(env.l1Wallet.address)).to.deep.equal(
BigNumber.from(999500)
)
expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal(
BigNumber.from(0)
)
} }
)
const tx = await env.l2Bridge.withdraw(
L2__ERC20.address, withdrawalTest(
500, 'should withdraw tokens from L2 to the transfer recipient',
2000000, async () => {
'0x' const tx = await env.l2Bridge
) .connect(otherWalletL2)
await env.relayXDomainMessages(tx) .withdraw(L2__ERC20.address, 500, 2000000, '0x')
await env.waitForXDomainTransaction(tx, Direction.L2ToL1) await env.relayXDomainMessages(tx)
expect(await L1__ERC20.balanceOf(env.l1Wallet.address)).to.deep.equal( await env.waitForXDomainTransaction(tx, Direction.L2ToL1)
BigNumber.from(999500) expect(await L1__ERC20.balanceOf(otherWalletL1.address)).to.deep.equal(
) BigNumber.from(500)
expect(await L2__ERC20.balanceOf(env.l2Wallet.address)).to.deep.equal( )
BigNumber.from(0) expect(await L2__ERC20.balanceOf(otherWalletL2.address)).to.deep.equal(
) BigNumber.from(0)
}).timeout(isLiveNetwork() ? 300_000 : 120_000) )
it('should withdraw tokens from L2 to the transfer recipient', async function () {
if (await isMainnet(env)) {
console.log('Skipping withdrawals test on mainnet.')
this.skip()
return
} }
)
const tx = await env.l2Bridge
.connect(otherWalletL2)
.withdraw(L2__ERC20.address, 500, 2000000, '0x')
await env.relayXDomainMessages(tx)
await env.waitForXDomainTransaction(tx, Direction.L2ToL1)
expect(await L1__ERC20.balanceOf(otherWalletL1.address)).to.deep.equal(
BigNumber.from(500)
)
expect(await L2__ERC20.balanceOf(otherWalletL2.address)).to.deep.equal(
BigNumber.from(0)
)
}).timeout(isLiveNetwork() ? 300_000 : 120_000)
}) })
import { expect } from './shared/setup'
import { BigNumber, Contract, ContractFactory, utils, Wallet } from 'ethers' import { BigNumber, Contract, ContractFactory, utils, Wallet } from 'ethers'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { UniswapV3Deployer } from 'uniswap-v3-deploy-plugin/dist/deployer/UniswapV3Deployer' import { UniswapV3Deployer } from 'uniswap-v3-deploy-plugin/dist/deployer/UniswapV3Deployer'
import { OptimismEnv } from './shared/env'
import { FeeAmount, TICK_SPACINGS } from '@uniswap/v3-sdk' import { FeeAmount, TICK_SPACINGS } from '@uniswap/v3-sdk'
import { abi as NFTABI } from '@uniswap/v3-periphery/artifacts/contracts/NonfungiblePositionManager.sol/NonfungiblePositionManager.json' import { abi as NFTABI } from '@uniswap/v3-periphery/artifacts/contracts/NonfungiblePositionManager.sol/NonfungiblePositionManager.json'
import { abi as RouterABI } from '@uniswap/v3-periphery/artifacts/contracts/SwapRouter.sol/SwapRouter.json' import { abi as RouterABI } from '@uniswap/v3-periphery/artifacts/contracts/SwapRouter.sol/SwapRouter.json'
import { OptimismEnv } from './shared/env'
import { expect } from './shared/setup'
// Below methods taken from the Uniswap test suite, see // Below methods taken from the Uniswap test suite, see
// https://github.com/Uniswap/v3-periphery/blob/main/test/shared/ticks.ts // https://github.com/Uniswap/v3-periphery/blob/main/test/shared/ticks.ts
const getMinTick = (tickSpacing: number) => const getMinTick = (tickSpacing: number) =>
......
import { expect } from './shared/setup'
/* Imports: External */ /* Imports: External */
import { BigNumber, utils } from 'ethers' import { BigNumber, utils } from 'ethers'
import { serialize } from '@ethersproject/transactions' import { serialize } from '@ethersproject/transactions'
import { predeploys, getContractFactory } from '@eth-optimism/contracts' import { predeploys, getContractFactory } from '@eth-optimism/contracts'
/* Imports: Internal */ /* Imports: Internal */
import { isLiveNetwork } from './shared/utils' import { expect } from './shared/setup'
import { hardhatTest } from './shared/utils'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { Direction } from './shared/watcher-utils' import { Direction } from './shared/watcher-utils'
const setPrices = async (env: OptimismEnv, value: number | BigNumber) => { const setPrices = async (env: OptimismEnv, value: number | BigNumber) => {
if (isLiveNetwork()) {
return
}
const gasPrice = await env.gasPriceOracle.setGasPrice(value) const gasPrice = await env.gasPriceOracle.setGasPrice(value)
await gasPrice.wait() await gasPrice.wait()
const baseFee = await env.gasPriceOracle.setL1BaseFee(value) const baseFee = await env.gasPriceOracle.setL1BaseFee(value)
...@@ -28,24 +24,25 @@ describe('Fee Payment Integration Tests', async () => { ...@@ -28,24 +24,25 @@ describe('Fee Payment Integration Tests', async () => {
env = await OptimismEnv.new() env = await OptimismEnv.new()
}) })
if (!isLiveNetwork()) { hardhatTest(
it(`should return eth_gasPrice equal to OVM_GasPriceOracle.gasPrice`, async () => { `should return eth_gasPrice equal to OVM_GasPriceOracle.gasPrice`,
async () => {
const assertGasPrice = async () => { const assertGasPrice = async () => {
const gasPrice = await env.l2Wallet.getGasPrice() const gasPrice = await env.l2Wallet.getGasPrice()
const oracleGasPrice = await env.gasPriceOracle.gasPrice() const oracleGasPrice = await env.gasPriceOracle.gasPrice()
expect(gasPrice).to.deep.equal(oracleGasPrice) expect(gasPrice).to.deep.equal(oracleGasPrice)
} }
assertGasPrice() await assertGasPrice()
// update the gas price // update the gas price
const tx = await env.gasPriceOracle.setGasPrice(1000) const tx = await env.gasPriceOracle.setGasPrice(1000)
await tx.wait() await tx.wait()
assertGasPrice() await assertGasPrice()
}) }
} )
it('Paying a nonzero but acceptable gasPrice fee', async () => { hardhatTest('Paying a nonzero but acceptable gasPrice fee', async () => {
await setPrices(env, 1000) await setPrices(env, 1000)
const amount = utils.parseEther('0.0000001') const amount = utils.parseEther('0.0000001')
...@@ -97,7 +94,7 @@ describe('Fee Payment Integration Tests', async () => { ...@@ -97,7 +94,7 @@ describe('Fee Payment Integration Tests', async () => {
await setPrices(env, 1) await setPrices(env, 1)
}) })
it('should compute correct fee', async () => { hardhatTest('should compute correct fee', async () => {
await setPrices(env, 1000) await setPrices(env, 1000)
const preBalance = await env.l2Wallet.getBalance() const preBalance = await env.l2Wallet.getBalance()
...@@ -149,39 +146,38 @@ describe('Fee Payment Integration Tests', async () => { ...@@ -149,39 +146,38 @@ describe('Fee Payment Integration Tests', async () => {
await expect(env.sequencerFeeVault.withdraw()).to.be.rejected await expect(env.sequencerFeeVault.withdraw()).to.be.rejected
}) })
it('should be able to withdraw fees back to L1 once the minimum is met', async function () { hardhatTest(
if (isLiveNetwork()) { 'should be able to withdraw fees back to L1 once the minimum is met',
this.skip() async () => {
return const l1FeeWallet = await env.sequencerFeeVault.l1FeeWallet()
} const balanceBefore = await env.l1Wallet.provider.getBalance(l1FeeWallet)
const withdrawalAmount =
const l1FeeWallet = await env.sequencerFeeVault.l1FeeWallet() await env.sequencerFeeVault.MIN_WITHDRAWAL_AMOUNT()
const balanceBefore = await env.l1Wallet.provider.getBalance(l1FeeWallet)
const withdrawalAmount = await env.sequencerFeeVault.MIN_WITHDRAWAL_AMOUNT() // Transfer the minimum required to withdraw.
const tx = await env.l2Wallet.sendTransaction({
// Transfer the minimum required to withdraw. to: env.sequencerFeeVault.address,
const tx = await env.l2Wallet.sendTransaction({ value: withdrawalAmount,
to: env.sequencerFeeVault.address, gasLimit: 500000,
value: withdrawalAmount, })
gasLimit: 500000, await tx.wait()
})
await tx.wait()
const vaultBalance = await env.ovmEth.balanceOf( const vaultBalance = await env.ovmEth.balanceOf(
env.sequencerFeeVault.address env.sequencerFeeVault.address
) )
const withdrawTx = await env.sequencerFeeVault.withdraw() const withdrawTx = await env.sequencerFeeVault.withdraw()
// Wait for the withdrawal to be relayed to L1. // Wait for the withdrawal to be relayed to L1.
await withdrawTx.wait() await withdrawTx.wait()
await env.relayXDomainMessages(withdrawTx) await env.relayXDomainMessages(withdrawTx)
await env.waitForXDomainTransaction(withdrawTx, Direction.L2ToL1) await env.waitForXDomainTransaction(withdrawTx, Direction.L2ToL1)
// Balance difference should be equal to old L2 balance. // Balance difference should be equal to old L2 balance.
const balanceAfter = await env.l1Wallet.provider.getBalance(l1FeeWallet) const balanceAfter = await env.l1Wallet.provider.getBalance(l1FeeWallet)
expect(balanceAfter.sub(balanceBefore)).to.deep.equal( expect(balanceAfter.sub(balanceBefore)).to.deep.equal(
BigNumber.from(vaultBalance) BigNumber.from(vaultBalance)
) )
}) }
)
}) })
import { expect } from './shared/setup'
import { BigNumber, Contract, ContractFactory, Wallet } from 'ethers' import { BigNumber, Contract, ContractFactory, Wallet } from 'ethers'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { expect } from './shared/setup'
import { import {
fundUser, fundUser,
encodeSolidityRevertMessage, encodeSolidityRevertMessage,
...@@ -45,7 +45,7 @@ describe('Native ETH value integration tests', () => { ...@@ -45,7 +45,7 @@ describe('Native ETH value integration tests', () => {
const there = await wallet.sendTransaction({ const there = await wallet.sendTransaction({
to: other.address, to: other.address,
value, value,
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
}) })
const thereReceipt = await there.wait() const thereReceipt = await there.wait()
const thereGas = thereReceipt.gasUsed.mul(there.gasPrice) const thereGas = thereReceipt.gasUsed.mul(there.gasPrice)
...@@ -63,7 +63,7 @@ describe('Native ETH value integration tests', () => { ...@@ -63,7 +63,7 @@ describe('Native ETH value integration tests', () => {
const backAgain = await other.sendTransaction({ const backAgain = await other.sendTransaction({
to: wallet.address, to: wallet.address,
value: backVal, value: backVal,
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
}) })
const backReceipt = await backAgain.wait() const backReceipt = await backAgain.wait()
const backGas = backReceipt.gasUsed.mul(backAgain.gasPrice) const backGas = backReceipt.gasUsed.mul(backAgain.gasPrice)
...@@ -169,7 +169,7 @@ describe('Native ETH value integration tests', () => { ...@@ -169,7 +169,7 @@ describe('Native ETH value integration tests', () => {
it('should allow ETH to be sent', async () => { it('should allow ETH to be sent', async () => {
const sendAmount = 15 const sendAmount = 15
const tx = await ValueCalls0.simpleSend(ValueCalls1.address, sendAmount, { const tx = await ValueCalls0.simpleSend(ValueCalls1.address, sendAmount, {
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
}) })
await tx.wait() await tx.wait()
......
import { expect } from './shared/setup'
/* Imports: External */ /* Imports: External */
import { Wallet, utils, BigNumber } from 'ethers' import { Wallet, utils, BigNumber } from 'ethers'
import { serialize } from '@ethersproject/transactions' import { serialize } from '@ethersproject/transactions'
...@@ -7,13 +5,17 @@ import { predeploys } from '@eth-optimism/contracts' ...@@ -7,13 +5,17 @@ import { predeploys } from '@eth-optimism/contracts'
import { expectApprox } from '@eth-optimism/core-utils' import { expectApprox } from '@eth-optimism/core-utils'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from './shared/setup'
import { Direction } from './shared/watcher-utils' import { Direction } from './shared/watcher-utils'
import {
import { isMainnet, PROXY_SEQUENCER_ENTRYPOINT_ADDRESS } from './shared/utils' DEFAULT_TEST_GAS_L1,
DEFAULT_TEST_GAS_L2,
envConfig,
PROXY_SEQUENCER_ENTRYPOINT_ADDRESS,
withdrawalTest,
} from './shared/utils'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
const DEFAULT_TEST_GAS_L1 = 330_000
const DEFAULT_TEST_GAS_L2 = 1_300_000
// TX size enforced by CTC: // TX size enforced by CTC:
const MAX_ROLLUP_TX_SIZE = 50_000 const MAX_ROLLUP_TX_SIZE = 50_000
...@@ -183,13 +185,7 @@ describe('Native ETH Integration Tests', async () => { ...@@ -183,13 +185,7 @@ describe('Native ETH Integration Tests', async () => {
).to.be.reverted ).to.be.reverted
}) })
it('withdraw', async function () { withdrawalTest('withdraw', async () => {
if (await isMainnet(env)) {
console.log('Skipping withdrawals test on mainnet.')
this.skip()
return
}
const withdrawAmount = BigNumber.from(3) const withdrawAmount = BigNumber.from(3)
const preBalances = await getBalances(env) const preBalances = await getBalances(env)
expect( expect(
...@@ -231,13 +227,7 @@ describe('Native ETH Integration Tests', async () => { ...@@ -231,13 +227,7 @@ describe('Native ETH Integration Tests', async () => {
) )
}) })
it('withdrawTo', async function () { withdrawalTest('withdrawTo', async () => {
if (await isMainnet(env)) {
console.log('Skipping withdrawals test on mainnet.')
this.skip()
return
}
const withdrawAmount = BigNumber.from(3) const withdrawAmount = BigNumber.from(3)
const preBalances = await getBalances(env) const preBalances = await getBalances(env)
...@@ -295,73 +285,71 @@ describe('Native ETH Integration Tests', async () => { ...@@ -295,73 +285,71 @@ describe('Native ETH Integration Tests', async () => {
) )
}) })
it('deposit, transfer, withdraw', async function () { withdrawalTest(
if (await isMainnet(env)) { 'deposit, transfer, withdraw',
console.log('Skipping withdrawals test on mainnet.') async () => {
this.skip() // 1. deposit
return const amount = utils.parseEther('1')
} await env.waitForXDomainTransaction(
env.l1Bridge.depositETH(DEFAULT_TEST_GAS_L2, '0xFFFF', {
// 1. deposit value: amount,
const amount = utils.parseEther('1') gasLimit: DEFAULT_TEST_GAS_L1,
await env.waitForXDomainTransaction( }),
env.l1Bridge.depositETH(DEFAULT_TEST_GAS_L2, '0xFFFF', { Direction.L1ToL2
value: amount,
gasLimit: DEFAULT_TEST_GAS_L1,
}),
Direction.L1ToL2
)
// 2. transfer to another address
const other = Wallet.createRandom().connect(env.l2Wallet.provider)
const tx = await env.l2Wallet.sendTransaction({
to: other.address,
value: amount,
})
await tx.wait()
const l1BalanceBefore = await other
.connect(env.l1Wallet.provider)
.getBalance()
// 3. do withdrawal
const withdrawnAmount = utils.parseEther('0.95')
const transaction = await env.l2Bridge
.connect(other)
.withdraw(
predeploys.OVM_ETH,
withdrawnAmount,
DEFAULT_TEST_GAS_L1,
'0xFFFF'
) )
await transaction.wait()
await env.relayXDomainMessages(transaction)
const receipts = await env.waitForXDomainTransaction(
transaction,
Direction.L2ToL1
)
// Compute the L1 portion of the fee // 2. transfer to another address
const l1Fee = await env.gasPriceOracle.getL1Fee( const other = Wallet.createRandom().connect(env.l2Wallet.provider)
serialize({ const tx = await env.l2Wallet.sendTransaction({
nonce: transaction.nonce, to: other.address,
value: transaction.value, value: amount,
gasPrice: transaction.gasPrice,
gasLimit: transaction.gasLimit,
to: transaction.to,
data: transaction.data,
}) })
) await tx.wait()
const l1BalanceBefore = await other
.connect(env.l1Wallet.provider)
.getBalance()
// 3. do withdrawal
const withdrawnAmount = utils.parseEther('0.95')
const transaction = await env.l2Bridge
.connect(other)
.withdraw(
predeploys.OVM_ETH,
withdrawnAmount,
DEFAULT_TEST_GAS_L1,
'0xFFFF'
)
await transaction.wait()
await env.relayXDomainMessages(transaction)
const receipts = await env.waitForXDomainTransaction(
transaction,
Direction.L2ToL1
)
// check that correct amount was withdrawn and that fee was charged // Compute the L1 portion of the fee
const l2Fee = receipts.tx.gasPrice.mul(receipts.receipt.gasUsed) const l1Fee = await env.gasPriceOracle.getL1Fee(
serialize({
nonce: transaction.nonce,
value: transaction.value,
gasPrice: transaction.gasPrice,
gasLimit: transaction.gasLimit,
to: transaction.to,
data: transaction.data,
})
)
const fee = l1Fee.add(l2Fee) // check that correct amount was withdrawn and that fee was charged
const l1BalanceAfter = await other const l2Fee = receipts.tx.gasPrice.mul(receipts.receipt.gasUsed)
.connect(env.l1Wallet.provider)
.getBalance() const fee = l1Fee.add(l2Fee)
const l2BalanceAfter = await other.getBalance() const l1BalanceAfter = await other
expect(l1BalanceAfter).to.deep.eq(l1BalanceBefore.add(withdrawnAmount)) .connect(env.l1Wallet.provider)
expect(l2BalanceAfter).to.deep.eq(amount.sub(withdrawnAmount).sub(fee)) .getBalance()
}) const l2BalanceAfter = await other.getBalance()
expect(l1BalanceAfter).to.deep.eq(l1BalanceBefore.add(withdrawnAmount))
expect(l2BalanceAfter).to.deep.eq(amount.sub(withdrawnAmount).sub(fee))
},
envConfig.MOCHA_TIMEOUT * 3
)
}) })
import { expect } from './shared/setup'
/* Imports: External */ /* Imports: External */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { injectL2Context, expectApprox } from '@eth-optimism/core-utils' import { injectL2Context, expectApprox } from '@eth-optimism/core-utils'
...@@ -7,7 +5,13 @@ import { predeploys } from '@eth-optimism/contracts' ...@@ -7,7 +5,13 @@ import { predeploys } from '@eth-optimism/contracts'
import { Contract, BigNumber } from 'ethers' import { Contract, BigNumber } from 'ethers'
/* Imports: Internal */ /* Imports: Internal */
import { l2Provider, l1Provider, IS_LIVE_NETWORK } from './shared/utils' import { expect } from './shared/setup'
import {
l2Provider,
l1Provider,
envConfig,
DEFAULT_TEST_GAS_L1,
} from './shared/utils'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { Direction } from './shared/watcher-utils' import { Direction } from './shared/watcher-utils'
...@@ -23,29 +27,25 @@ describe('OVM Context: Layer 2 EVM Context', () => { ...@@ -23,29 +27,25 @@ describe('OVM Context: Layer 2 EVM Context', () => {
env = await OptimismEnv.new() env = await OptimismEnv.new()
}) })
let OVMMulticall: Contract let Multicall: Contract
let OVMContextStorage: Contract let OVMContextStorage: Contract
beforeEach(async () => { beforeEach(async () => {
const OVMContextStorageFactory = await ethers.getContractFactory( const OVMContextStorageFactory = await ethers.getContractFactory(
'OVMContextStorage', 'OVMContextStorage',
env.l2Wallet env.l2Wallet
) )
const OVMMulticallFactory = await ethers.getContractFactory( const MulticallFactory = await ethers.getContractFactory(
'OVMMulticall', 'Multicall',
env.l2Wallet env.l2Wallet
) )
OVMContextStorage = await OVMContextStorageFactory.deploy() OVMContextStorage = await OVMContextStorageFactory.deploy()
await OVMContextStorage.deployTransaction.wait() await OVMContextStorage.deployTransaction.wait()
OVMMulticall = await OVMMulticallFactory.deploy() Multicall = await MulticallFactory.deploy()
await OVMMulticall.deployTransaction.wait() await Multicall.deployTransaction.wait()
}) })
let numTxs = 5 const numTxs = envConfig.OVMCONTEXT_SPEC_NUM_TXS
if (IS_LIVE_NETWORK) {
// Tests take way too long if we don't reduce the number of txs here.
numTxs = 1
}
it('enqueue: L1 contextual values are correctly set in L2', async () => { it('enqueue: L1 contextual values are correctly set in L2', async () => {
for (let i = 0; i < numTxs; i++) { for (let i = 0; i < numTxs; i++) {
...@@ -54,7 +54,10 @@ describe('OVM Context: Layer 2 EVM Context', () => { ...@@ -54,7 +54,10 @@ describe('OVM Context: Layer 2 EVM Context', () => {
const tx = await env.l1Messenger.sendMessage( const tx = await env.l1Messenger.sendMessage(
OVMContextStorage.address, OVMContextStorage.address,
'0x', '0x',
2_000_000 2_000_000,
{
gasLimit: DEFAULT_TEST_GAS_L1,
}
) )
// Wait for the transaction to be sent over to L2. // Wait for the transaction to be sent over to L2.
...@@ -89,7 +92,7 @@ describe('OVM Context: Layer 2 EVM Context', () => { ...@@ -89,7 +92,7 @@ describe('OVM Context: Layer 2 EVM Context', () => {
const coinbase = await OVMContextStorage.coinbases(i) const coinbase = await OVMContextStorage.coinbases(i)
expect(coinbase).to.equal(predeploys.OVM_SequencerFeeVault) expect(coinbase).to.equal(predeploys.OVM_SequencerFeeVault)
} }
}).timeout(150000) // this specific test takes a while because it involves L1 to L2 txs })
it('should set correct OVM Context for `eth_call`', async () => { it('should set correct OVM Context for `eth_call`', async () => {
for (let i = 0; i < numTxs; i++) { for (let i = 0; i < numTxs; i++) {
...@@ -101,21 +104,23 @@ describe('OVM Context: Layer 2 EVM Context', () => { ...@@ -101,21 +104,23 @@ describe('OVM Context: Layer 2 EVM Context', () => {
await dummyTx.wait() await dummyTx.wait()
const block = await L2Provider.getBlockWithTransactions('latest') const block = await L2Provider.getBlockWithTransactions('latest')
const [, returnData] = await OVMMulticall.callStatic.aggregate( const [, returnData] = await Multicall.callStatic.aggregate(
[ [
[ [
OVMMulticall.address, OVMContextStorage.address,
OVMMulticall.interface.encodeFunctionData( OVMContextStorage.interface.encodeFunctionData(
'getCurrentBlockTimestamp' 'getCurrentBlockTimestamp'
), ),
], ],
[ [
OVMMulticall.address, OVMContextStorage.address,
OVMMulticall.interface.encodeFunctionData('getCurrentBlockNumber'), OVMContextStorage.interface.encodeFunctionData(
'getCurrentBlockNumber'
),
], ],
[ [
OVMMulticall.address, OVMContextStorage.address,
OVMMulticall.interface.encodeFunctionData( OVMContextStorage.interface.encodeFunctionData(
'getCurrentL1BlockNumber' 'getCurrentL1BlockNumber'
), ),
], ],
...@@ -141,19 +146,23 @@ describe('OVM Context: Layer 2 EVM Context', () => { ...@@ -141,19 +146,23 @@ describe('OVM Context: Layer 2 EVM Context', () => {
*/ */
it('should return same timestamp and blocknumbers between `eth_call` and `rollup_getInfo`', async () => { it('should return same timestamp and blocknumbers between `eth_call` and `rollup_getInfo`', async () => {
// As atomically as possible, call `rollup_getInfo` and OVMMulticall for the // As atomically as possible, call `rollup_getInfo` and Multicall for the
// block number and timestamp. If this is not atomic, then the sequencer might // block number and timestamp. If this is not atomic, then the sequencer might
// update the timestamp between the `eth_call` and the `rollup_getInfo` calls. // update the timestamp between the `eth_call` and the `rollup_getInfo` calls.
const [info, [, returnData]] = await Promise.all([ const [info, [, returnData]] = await Promise.all([
L2Provider.send('rollup_getInfo', []), L2Provider.send('rollup_getInfo', []),
OVMMulticall.callStatic.aggregate([ Multicall.callStatic.aggregate([
[ [
OVMMulticall.address, OVMContextStorage.address,
OVMMulticall.interface.encodeFunctionData('getCurrentBlockTimestamp'), OVMContextStorage.interface.encodeFunctionData(
'getCurrentBlockTimestamp'
),
], ],
[ [
OVMMulticall.address, OVMContextStorage.address,
OVMMulticall.interface.encodeFunctionData('getCurrentL1BlockNumber'), OVMContextStorage.interface.encodeFunctionData(
'getCurrentL1BlockNumber'
),
], ],
]), ]),
]) ])
......
import { expect } from './shared/setup'
/* Imports: Internal */ /* Imports: Internal */
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { predeploys, getContractInterface } from '@eth-optimism/contracts' import { predeploys, getContractInterface } from '@eth-optimism/contracts'
/* Imports: External */ /* Imports: External */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
describe('predeploys', () => { describe('predeploys', () => {
......
import { expect } from './shared/setup'
/* Imports: Internal */ /* Imports: Internal */
import { providers } from 'ethers' import { providers } from 'ethers'
import { injectL2Context, applyL1ToL2Alias } from '@eth-optimism/core-utils' import { injectL2Context, applyL1ToL2Alias } from '@eth-optimism/core-utils'
/* Imports: External */ /* Imports: External */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { Direction } from './shared/watcher-utils' import { Direction } from './shared/watcher-utils'
import { isLiveNetwork } from './shared/utils' import { DEFAULT_TEST_GAS_L1, envConfig } from './shared/utils'
describe('Queue Ingestion', () => { describe('Queue Ingestion', () => {
let env: OptimismEnv let env: OptimismEnv
...@@ -21,7 +20,7 @@ describe('Queue Ingestion', () => { ...@@ -21,7 +20,7 @@ describe('Queue Ingestion', () => {
// that are in the queue and submit them. L2 will pick up the // that are in the queue and submit them. L2 will pick up the
// sequencer batch appended event and play the transactions. // sequencer batch appended event and play the transactions.
it('should order transactions correctly', async () => { it('should order transactions correctly', async () => {
const numTxs = 5 const numTxs = envConfig.OVMCONTEXT_SPEC_NUM_TXS
// Enqueue some transactions by building the calldata and then sending // Enqueue some transactions by building the calldata and then sending
// the transaction to Layer 1 // the transaction to Layer 1
...@@ -30,7 +29,10 @@ describe('Queue Ingestion', () => { ...@@ -30,7 +29,10 @@ describe('Queue Ingestion', () => {
const tx = await env.l1Messenger.sendMessage( const tx = await env.l1Messenger.sendMessage(
`0x${`${i}`.repeat(40)}`, `0x${`${i}`.repeat(40)}`,
`0x0${i}`, `0x0${i}`,
1_000_000 1_000_000,
{
gasLimit: DEFAULT_TEST_GAS_L1,
}
) )
await tx.wait() await tx.wait()
txs.push(tx) txs.push(tx)
...@@ -62,5 +64,5 @@ describe('Queue Ingestion', () => { ...@@ -62,5 +64,5 @@ describe('Queue Ingestion', () => {
) )
expect(l2Tx.l1BlockNumber).to.equal(l1TxReceipt.blockNumber) expect(l2Tx.l1BlockNumber).to.equal(l1TxReceipt.blockNumber)
} }
}).timeout(isLiveNetwork() ? 300_000 : 100_000) })
}) })
import { TransactionReceipt } from '@ethersproject/abstract-provider'
import { expect } from './shared/setup' import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { import {
defaultTransactionFactory, defaultTransactionFactory,
gasPriceForL2, gasPriceForL2,
sleep, sleep,
isLiveNetwork, envConfig,
} from './shared/utils' } from './shared/utils'
import { TransactionReceipt } from '@ethersproject/abstract-provider'
describe('Replica Tests', () => { describe('Replica Tests', () => {
let env: OptimismEnv let env: OptimismEnv
before(async () => { before(async function () {
if (!envConfig.RUN_REPLICA_TESTS) {
this.skip()
return
}
env = await OptimismEnv.new() env = await OptimismEnv.new()
}) })
describe('Matching blocks', () => { describe('Matching blocks', () => {
if (isLiveNetwork()) {
console.log('Skipping replica tests on live network')
return
}
it('should sync a transaction', async () => { it('should sync a transaction', async () => {
const tx = defaultTransactionFactory() const tx = defaultTransactionFactory()
tx.gasPrice = await gasPriceForL2(env) tx.gasPrice = await gasPriceForL2()
const result = await env.l2Wallet.sendTransaction(tx) const result = await env.l2Wallet.sendTransaction(tx)
let receipt: TransactionReceipt let receipt: TransactionReceipt
...@@ -48,7 +49,7 @@ describe('Replica Tests', () => { ...@@ -48,7 +49,7 @@ describe('Replica Tests', () => {
const tx = { const tx = {
...defaultTransactionFactory(), ...defaultTransactionFactory(),
nonce: await env.l2Wallet.getTransactionCount(), nonce: await env.l2Wallet.getTransactionCount(),
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
chainId: null, // Disables EIP155 transaction signing. chainId: null, // Disables EIP155 transaction signing.
} }
const signed = await env.l2Wallet.signTransaction(tx) const signed = await env.l2Wallet.signTransaction(tx)
...@@ -76,7 +77,7 @@ describe('Replica Tests', () => { ...@@ -76,7 +77,7 @@ describe('Replica Tests', () => {
const tx = { const tx = {
...defaultTransactionFactory(), ...defaultTransactionFactory(),
nonce: await env.l2Wallet.getTransactionCount(), nonce: await env.l2Wallet.getTransactionCount(),
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
} }
const signed = await env.l2Wallet.signTransaction(tx) const signed = await env.l2Wallet.signTransaction(tx)
const result = await env.replicaProvider.sendTransaction(signed) const result = await env.replicaProvider.sendTransaction(signed)
......
import { expect } from './shared/setup'
import { expectApprox, injectL2Context } from '@eth-optimism/core-utils' import { expectApprox, injectL2Context } from '@eth-optimism/core-utils'
import { Wallet, BigNumber, Contract, ContractFactory, constants } from 'ethers' import { Wallet, BigNumber, Contract, ContractFactory, constants } from 'ethers'
import { serialize } from '@ethersproject/transactions' import { serialize } from '@ethersproject/transactions'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import {
TransactionReceipt,
TransactionRequest,
} from '@ethersproject/providers'
import { import {
sleep, sleep,
l2Provider, l2Provider,
defaultTransactionFactory, defaultTransactionFactory,
fundUser, fundUser,
L2_CHAINID, L2_CHAINID,
isLiveNetwork,
gasPriceForL2, gasPriceForL2,
isHardhat,
hardhatTest,
envConfig,
} from './shared/utils' } from './shared/utils'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { import { expect } from './shared/setup'
TransactionReceipt,
TransactionRequest,
} from '@ethersproject/providers'
import simpleStorageJson from '../artifacts/contracts/SimpleStorage.sol/SimpleStorage.json'
describe('Basic RPC tests', () => { describe('Basic RPC tests', () => {
let env: OptimismEnv let env: OptimismEnv
...@@ -57,7 +58,7 @@ describe('Basic RPC tests', () => { ...@@ -57,7 +58,7 @@ describe('Basic RPC tests', () => {
describe('eth_sendRawTransaction', () => { describe('eth_sendRawTransaction', () => {
it('should correctly process a valid transaction', async () => { it('should correctly process a valid transaction', async () => {
const tx = defaultTransactionFactory() const tx = defaultTransactionFactory()
tx.gasPrice = await gasPriceForL2(env) tx.gasPrice = await gasPriceForL2()
const nonce = await wallet.getTransactionCount() const nonce = await wallet.getTransactionCount()
const result = await wallet.sendTransaction(tx) const result = await wallet.sendTransaction(tx)
...@@ -71,7 +72,7 @@ describe('Basic RPC tests', () => { ...@@ -71,7 +72,7 @@ describe('Basic RPC tests', () => {
it('should not accept a transaction with the wrong chain ID', async () => { it('should not accept a transaction with the wrong chain ID', async () => {
const tx = { const tx = {
...defaultTransactionFactory(), ...defaultTransactionFactory(),
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
chainId: (await wallet.getChainId()) + 1, chainId: (await wallet.getChainId()) + 1,
} }
...@@ -84,7 +85,7 @@ describe('Basic RPC tests', () => { ...@@ -84,7 +85,7 @@ describe('Basic RPC tests', () => {
const tx = { const tx = {
...defaultTransactionFactory(), ...defaultTransactionFactory(),
nonce: await wallet.getTransactionCount(), nonce: await wallet.getTransactionCount(),
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
chainId: null, // Disables EIP155 transaction signing. chainId: null, // Disables EIP155 transaction signing.
} }
const signed = await wallet.signTransaction(tx) const signed = await wallet.signTransaction(tx)
...@@ -98,7 +99,7 @@ describe('Basic RPC tests', () => { ...@@ -98,7 +99,7 @@ describe('Basic RPC tests', () => {
it('should accept a transaction with a value', async () => { it('should accept a transaction with a value', async () => {
const tx = { const tx = {
...defaultTransactionFactory(), ...defaultTransactionFactory(),
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
chainId: await env.l2Wallet.getChainId(), chainId: await env.l2Wallet.getChainId(),
data: '0x', data: '0x',
value: ethers.utils.parseEther('0.1'), value: ethers.utils.parseEther('0.1'),
...@@ -118,7 +119,7 @@ describe('Basic RPC tests', () => { ...@@ -118,7 +119,7 @@ describe('Basic RPC tests', () => {
const balance = await env.l2Wallet.getBalance() const balance = await env.l2Wallet.getBalance()
const tx = { const tx = {
...defaultTransactionFactory(), ...defaultTransactionFactory(),
gasPrice: await gasPriceForL2(env), gasPrice: await gasPriceForL2(),
chainId: await env.l2Wallet.getChainId(), chainId: await env.l2Wallet.getChainId(),
data: '0x', data: '0x',
value: balance.add(ethers.utils.parseEther('1')), value: balance.add(ethers.utils.parseEther('1')),
...@@ -138,32 +139,29 @@ describe('Basic RPC tests', () => { ...@@ -138,32 +139,29 @@ describe('Basic RPC tests', () => {
}) })
    it('should reject a transaction with too low of a fee', async () => {
      const isHH = await isHardhat()
      let gasPrice
      if (isHH) {
        gasPrice = await env.gasPriceOracle.gasPrice()
        await env.gasPriceOracle.setGasPrice(1000)
      }

      const tx = {
        ...defaultTransactionFactory(),
        gasPrice: 1,
      }

      await expect(env.l2Wallet.sendTransaction(tx)).to.be.rejectedWith(
        /gas price too low: 1 wei, use at least tx\.gasPrice = \d+ wei/
      )

      if (isHH) {
        // Reset the gas price to its original price
        await env.gasPriceOracle.setGasPrice(gasPrice)
      }
    })

    it('should reject a transaction with too high of a fee', async () => {
const gasPrice = await env.gasPriceOracle.gasPrice() const gasPrice = await env.gasPriceOracle.gasPrice()
const largeGasPrice = gasPrice.mul(10) const largeGasPrice = gasPrice.mul(10)
const tx = { const tx = {
...@@ -333,7 +331,7 @@ describe('Basic RPC tests', () => { ...@@ -333,7 +331,7 @@ describe('Basic RPC tests', () => {
it('includes L1 gas price and L1 gas used', async () => { it('includes L1 gas price and L1 gas used', async () => {
const tx = await env.l2Wallet.populateTransaction({ const tx = await env.l2Wallet.populateTransaction({
to: env.l2Wallet.address, to: env.l2Wallet.address,
gasPrice: isLiveNetwork() ? 10000 : 1, gasPrice: await gasPriceForL2(),
}) })
const raw = serialize({ const raw = serialize({
...@@ -368,7 +366,7 @@ describe('Basic RPC tests', () => { ...@@ -368,7 +366,7 @@ describe('Basic RPC tests', () => {
describe('eth_getTransactionByHash', () => { describe('eth_getTransactionByHash', () => {
it('should be able to get all relevant l1/l2 transaction data', async () => { it('should be able to get all relevant l1/l2 transaction data', async () => {
const tx = defaultTransactionFactory() const tx = defaultTransactionFactory()
tx.gasPrice = await gasPriceForL2(env) tx.gasPrice = await gasPriceForL2()
const result = await wallet.sendTransaction(tx) const result = await wallet.sendTransaction(tx)
await result.wait() await result.wait()
...@@ -383,7 +381,7 @@ describe('Basic RPC tests', () => { ...@@ -383,7 +381,7 @@ describe('Basic RPC tests', () => {
it('should return the block and all included transactions', async () => { it('should return the block and all included transactions', async () => {
// Send a transaction and wait for it to be mined. // Send a transaction and wait for it to be mined.
const tx = defaultTransactionFactory() const tx = defaultTransactionFactory()
tx.gasPrice = await gasPriceForL2(env) tx.gasPrice = await gasPriceForL2()
const result = await wallet.sendTransaction(tx) const result = await wallet.sendTransaction(tx)
const receipt = await result.wait() const receipt = await result.wait()
...@@ -409,32 +407,31 @@ describe('Basic RPC tests', () => { ...@@ -409,32 +407,31 @@ describe('Basic RPC tests', () => {
    // Needs to be skipped on Prod networks because this test doesn't work when
    // other people are sending transactions to the Sequencer at the same time
    // as this test is running.
    hardhatTest(
      'should return the same result when new transactions are not applied',
      async () => {
        // Get latest block once to start.
        const prev = await provider.getBlockWithTransactions('latest')
        // set wait to null to allow a deep object comparison
        prev.transactions[0].wait = null

        // Over ten seconds, repeatedly check the latest block to make sure nothing has changed.
        for (let i = 0; i < 5; i++) {
          const latest = await provider.getBlockWithTransactions('latest')
          latest.transactions[0].wait = null
          // Check each key of the transaction individually
          // for easy debugging if one field changes
          for (const [key, value] of Object.entries(latest.transactions[0])) {
            expect(value).to.deep.equal(
              prev.transactions[0][key],
              `mismatch ${key}`
            )
          }
          expect(latest).to.deep.equal(prev)
          await sleep(2000)
        }
      }
    )
}) })
describe('eth_getBalance', () => { describe('eth_getBalance', () => {
...@@ -490,10 +487,15 @@ describe('Basic RPC tests', () => { ...@@ -490,10 +487,15 @@ describe('Basic RPC tests', () => {
}) })
describe('debug_traceTransaction', () => { describe('debug_traceTransaction', () => {
before(async function () {
if (!envConfig.RUN_DEBUG_TRACE_TESTS) {
this.skip()
}
})
it('should match debug_traceBlock', async () => { it('should match debug_traceBlock', async () => {
      const storage = await ethers.getContractFactory(
        'SimpleStorage',
        env.l2Wallet
      )
const tx = (await storage.deploy()).deployTransaction const tx = (await storage.deploy()).deployTransaction
......
import { DockerComposeNetwork } from './shared/docker-compose'
before(async () => {
if (!process.env.NO_NETWORK) {
await new DockerComposeNetwork().up()
}
})
import * as compose from 'docker-compose'
import * as shell from 'shelljs'
import * as path from 'path'
type ServiceNames =
| 'batch_submitter'
| 'dtl'
| 'l2geth'
| 'relayer'
| 'verifier'
| 'replica'
const OPS_DIRECTORY = path.join(process.cwd(), '../ops')
const DEFAULT_SERVICES: ServiceNames[] = [
'batch_submitter',
'dtl',
'l2geth',
'relayer',
]
export class DockerComposeNetwork {
constructor(private readonly services: ServiceNames[] = DEFAULT_SERVICES) {}
async up(options?: compose.IDockerComposeOptions) {
const out = await compose.upMany(this.services, {
cwd: OPS_DIRECTORY,
...options,
})
const { err, exitCode } = out
if (!err || exitCode) {
console.error(err)
throw new Error(
'Unexpected error when starting docker-compose network, dumping output'
)
}
if (err.includes('Creating')) {
console.info(
'🐳 Tests required starting containers. Waiting for sequencer to ready.'
)
shell.exec(`${OPS_DIRECTORY}/scripts/wait-for-sequencer.sh`, {
cwd: OPS_DIRECTORY,
})
}
return out
}
async logs() {
return compose.logs(this.services, { cwd: OPS_DIRECTORY })
}
async stop(service: ServiceNames) {
return compose.stopOne(service, { cwd: OPS_DIRECTORY })
}
async rm() {
return compose.rm({ cwd: OPS_DIRECTORY })
}
}
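The DockerComposeNetwork helper above is what the local test setup uses to bring services up. A minimal usage sketch, assuming a hypothetical runWithNetwork wrapper (not part of this diff), that starts the default services, dumps logs on failure, and removes the containers afterwards:

import { DockerComposeNetwork } from './shared/docker-compose'

const runWithNetwork = async (run: () => Promise<void>) => {
  // Starts batch_submitter, dtl, l2geth and relayer (the DEFAULT_SERVICES above).
  const network = new DockerComposeNetwork()
  await network.up()
  try {
    await run()
  } catch (err) {
    // Dump service logs to help debug the failure before re-throwing.
    console.error((await network.logs()).out)
    throw err
  } finally {
    await network.rm()
  }
}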
...@@ -19,6 +19,8 @@ import { ...@@ -19,6 +19,8 @@ import {
getL1Bridge, getL1Bridge,
getL2Bridge, getL2Bridge,
sleep, sleep,
envConfig,
DEFAULT_TEST_GAS_L1,
} from './utils' } from './utils'
import { import {
initWatcher, initWatcher,
...@@ -83,8 +85,10 @@ export class OptimismEnv { ...@@ -83,8 +85,10 @@ export class OptimismEnv {
// fund the user if needed // fund the user if needed
const balance = await l2Wallet.getBalance() const balance = await l2Wallet.getBalance()
if (balance.lt(utils.parseEther('1'))) { const min = envConfig.L2_WALLET_MIN_BALANCE_ETH.toString()
await fundUser(watcher, l1Bridge, utils.parseEther('1').sub(balance)) const topUp = envConfig.L2_WALLET_TOP_UP_AMOUNT_ETH.toString()
if (balance.lt(utils.parseEther(min))) {
await fundUser(watcher, l1Bridge, utils.parseEther(topUp))
} }
const l1Messenger = getContractFactory('L1CrossDomainMessenger') const l1Messenger = getContractFactory('L1CrossDomainMessenger')
.connect(l1Wallet) .connect(l1Wallet)
...@@ -156,6 +160,7 @@ export class OptimismEnv { ...@@ -156,6 +160,7 @@ export class OptimismEnv {
tx: Promise<TransactionResponse> | TransactionResponse tx: Promise<TransactionResponse> | TransactionResponse
): Promise<void> { ): Promise<void> {
tx = await tx tx = await tx
await tx.wait()
let messagePairs = [] let messagePairs = []
while (true) { while (true) {
...@@ -187,7 +192,10 @@ export class OptimismEnv { ...@@ -187,7 +192,10 @@ export class OptimismEnv {
message.sender, message.sender,
message.message, message.message,
message.messageNonce, message.messageNonce,
proof proof,
{
gasLimit: DEFAULT_TEST_GAS_L1 * 10,
}
) )
await result.wait() await result.wait()
break break
......
...@@ -23,7 +23,7 @@ export const fundRandomWallet = async ( ...@@ -23,7 +23,7 @@ export const fundRandomWallet = async (
const fundTx = await env.l1Wallet.sendTransaction({ const fundTx = await env.l1Wallet.sendTransaction({
gasLimit: 25_000, gasLimit: 25_000,
to: wallet.address, to: wallet.address,
gasPrice: await gasPriceForL1(env), gasPrice: await gasPriceForL1(),
value, value,
}) })
await fundTx.wait() await fundTx.wait()
...@@ -47,7 +47,7 @@ export const executeL1ToL2Transaction = async ( ...@@ -47,7 +47,7 @@ export const executeL1ToL2Transaction = async (
), ),
MESSAGE_GAS, MESSAGE_GAS,
{ {
gasPrice: await gasPriceForL1(env), gasPrice: await gasPriceForL1(),
} }
) )
) )
...@@ -71,7 +71,7 @@ export const executeL2ToL1Transaction = async ( ...@@ -71,7 +71,7 @@ export const executeL2ToL1Transaction = async (
), ),
MESSAGE_GAS, MESSAGE_GAS,
{ {
gasPrice: gasPriceForL2(env), gasPrice: gasPriceForL2(),
} }
) )
) )
...@@ -90,7 +90,7 @@ export const executeL2Transaction = async ( ...@@ -90,7 +90,7 @@ export const executeL2Transaction = async (
tx.contract tx.contract
.connect(signer) .connect(signer)
.functions[tx.functionName](...tx.functionParams, { .functions[tx.functionName](...tx.functionParams, {
gasPrice: gasPriceForL2(env), gasPrice: gasPriceForL2(),
}) })
) )
await result.wait() await result.wait()
......
...@@ -14,32 +14,54 @@ import { ...@@ -14,32 +14,54 @@ import {
predeploys, predeploys,
} from '@eth-optimism/contracts' } from '@eth-optimism/contracts'
import { injectL2Context, remove0x, Watcher } from '@eth-optimism/core-utils' import { injectL2Context, remove0x, Watcher } from '@eth-optimism/core-utils'
import { cleanEnv, str, num, bool } from 'envalid' import { cleanEnv, str, num, bool, makeValidator } from 'envalid'
import dotenv from 'dotenv' import dotenv from 'dotenv'
dotenv.config()
/* Imports: Internal */ /* Imports: Internal */
import { Direction, waitForXDomainTransaction } from './watcher-utils' import { Direction, waitForXDomainTransaction } from './watcher-utils'
import { OptimismEnv } from './env' import { OptimismEnv } from './env'
export const GWEI = BigNumber.from(1e9)
export const isLiveNetwork = () => { export const isLiveNetwork = () => {
return process.env.IS_LIVE_NETWORK === 'true' return process.env.IS_LIVE_NETWORK === 'true'
} }
if (isLiveNetwork()) { export const HARDHAT_CHAIN_ID = 31337
dotenv.config() export const DEFAULT_TEST_GAS_L1 = 330_000
} export const DEFAULT_TEST_GAS_L2 = 1_300_000
export const ON_CHAIN_GAS_PRICE = 'onchain'
const gasPriceValidator = makeValidator((gasPrice) => {
if (gasPrice === 'onchain') {
return gasPrice
}
const env = cleanEnv(process.env, { return num()._parse(gasPrice).toString()
})
const procEnv = cleanEnv(process.env, {
L1_GAS_PRICE: gasPriceValidator({
default: '0',
}),
L1_URL: str({ default: 'http://localhost:9545' }), L1_URL: str({ default: 'http://localhost:9545' }),
L2_URL: str({ default: 'http://localhost:8545' }),
VERIFIER_URL: str({ default: 'http://localhost:8547' }),
REPLICA_URL: str({ default: 'http://localhost:8549' }),
L1_POLLING_INTERVAL: num({ default: 10 }), L1_POLLING_INTERVAL: num({ default: 10 }),
L2_CHAINID: num({ default: 420 }),
L2_GAS_PRICE: gasPriceValidator({
default: 'onchain',
}),
L2_URL: str({ default: 'http://localhost:8545' }),
L2_POLLING_INTERVAL: num({ default: 10 }), L2_POLLING_INTERVAL: num({ default: 10 }),
VERIFIER_POLLING_INTERVAL: num({ default: 10 }), L2_WALLET_MIN_BALANCE_ETH: num({
default: 2,
}),
L2_WALLET_TOP_UP_AMOUNT_ETH: num({
default: 3,
}),
REPLICA_URL: str({ default: 'http://localhost:8549' }),
REPLICA_POLLING_INTERVAL: num({ default: 10 }), REPLICA_POLLING_INTERVAL: num({ default: 10 }),
PRIVATE_KEY: str({ PRIVATE_KEY: str({
default: default:
'0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80', '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80',
...@@ -51,31 +73,53 @@ const env = cleanEnv(process.env, { ...@@ -51,31 +73,53 @@ const env = cleanEnv(process.env, {
default: default:
'0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba', '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba',
}), }),
L2_CHAINID: num({ default: 420 }),
IS_LIVE_NETWORK: bool({ default: false }), OVMCONTEXT_SPEC_NUM_TXS: num({
default: 5,
}),
DTL_ENQUEUE_CONFIRMATIONS: num({
default: 0,
}),
RUN_WITHDRAWAL_TESTS: bool({
default: true,
}),
RUN_REPLICA_TESTS: bool({
default: true,
}),
RUN_DEBUG_TRACE_TESTS: bool({
default: true,
}),
RUN_STRESS_TESTS: bool({
default: true,
}),
MOCHA_TIMEOUT: num({
default: 120_000,
}),
MOCHA_BAIL: bool({
default: false,
}),
}) })
export const envConfig = procEnv
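The MOCHA_TIMEOUT and MOCHA_BAIL variables above configure the test runner itself rather than any single test. A minimal sketch of feeding them into a Hardhat config, assuming an illustrative import path; the repo may wire these values up differently:

import { HardhatUserConfig } from 'hardhat/config'

import { envConfig } from './test/shared/utils' // import path is illustrative

const config: HardhatUserConfig = {
  mocha: {
    // Every test gets the configured timeout; bail stops the run on the first failure.
    timeout: envConfig.MOCHA_TIMEOUT,
    bail: envConfig.MOCHA_BAIL,
  },
}

export default config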
// The hardhat instance // The hardhat instance
export const l1Provider = new providers.JsonRpcProvider(env.L1_URL) export const l1Provider = new providers.JsonRpcProvider(procEnv.L1_URL)
l1Provider.pollingInterval = env.L1_POLLING_INTERVAL l1Provider.pollingInterval = procEnv.L1_POLLING_INTERVAL
export const l2Provider = injectL2Context( export const l2Provider = injectL2Context(
new providers.JsonRpcProvider(env.L2_URL) new providers.JsonRpcProvider(procEnv.L2_URL)
) )
l2Provider.pollingInterval = env.L2_POLLING_INTERVAL l2Provider.pollingInterval = procEnv.L2_POLLING_INTERVAL
export const verifierProvider = injectL2Context(
new providers.JsonRpcProvider(env.VERIFIER_URL)
)
verifierProvider.pollingInterval = env.VERIFIER_POLLING_INTERVAL
export const replicaProvider = injectL2Context( export const replicaProvider = injectL2Context(
new providers.JsonRpcProvider(env.REPLICA_URL) new providers.JsonRpcProvider(procEnv.REPLICA_URL)
) )
replicaProvider.pollingInterval = env.REPLICA_POLLING_INTERVAL replicaProvider.pollingInterval = procEnv.REPLICA_POLLING_INTERVAL
// The sequencer private key which is funded on L1 // The sequencer private key which is funded on L1
export const l1Wallet = new Wallet(env.PRIVATE_KEY, l1Provider) export const l1Wallet = new Wallet(procEnv.PRIVATE_KEY, l1Provider)
// A random private key which should always be funded with deposits from L1 -> L2 // A random private key which should always be funded with deposits from L1 -> L2
// if it's using non-0 gas price // if it's using non-0 gas price
...@@ -83,7 +127,7 @@ export const l2Wallet = l1Wallet.connect(l2Provider) ...@@ -83,7 +127,7 @@ export const l2Wallet = l1Wallet.connect(l2Provider)
// The owner of the GasPriceOracle on L2 // The owner of the GasPriceOracle on L2
export const gasPriceOracleWallet = new Wallet( export const gasPriceOracleWallet = new Wallet(
env.GAS_PRICE_ORACLE_PRIVATE_KEY, procEnv.GAS_PRICE_ORACLE_PRIVATE_KEY,
l2Provider l2Provider
) )
...@@ -92,13 +136,12 @@ export const PROXY_SEQUENCER_ENTRYPOINT_ADDRESS = ...@@ -92,13 +136,12 @@ export const PROXY_SEQUENCER_ENTRYPOINT_ADDRESS =
'0x4200000000000000000000000000000000000004' '0x4200000000000000000000000000000000000004'
export const OVM_ETH_ADDRESS = predeploys.OVM_ETH export const OVM_ETH_ADDRESS = predeploys.OVM_ETH
export const L2_CHAINID = env.L2_CHAINID export const L2_CHAINID = procEnv.L2_CHAINID
export const IS_LIVE_NETWORK = env.IS_LIVE_NETWORK
export const getAddressManager = (provider: any) => { export const getAddressManager = (provider: any) => {
return getContractFactory('Lib_AddressManager') return getContractFactory('Lib_AddressManager')
.connect(provider) .connect(provider)
.attach(env.ADDRESS_MANAGER) .attach(procEnv.ADDRESS_MANAGER)
} }
// Gets the bridge contract // Gets the bridge contract
...@@ -115,33 +158,17 @@ export const getL1Bridge = async (wallet: Wallet, AddressManager: Contract) => { ...@@ -115,33 +158,17 @@ export const getL1Bridge = async (wallet: Wallet, AddressManager: Contract) => {
throw new Error('Proxy__OVM_L1StandardBridge not found') throw new Error('Proxy__OVM_L1StandardBridge not found')
} }
const L1StandardBridge = new Contract( return new Contract(ProxyBridgeAddress, l1BridgeInterface, wallet)
ProxyBridgeAddress,
l1BridgeInterface,
wallet
)
return L1StandardBridge
} }
export const getL2Bridge = async (wallet: Wallet) => { export const getL2Bridge = async (wallet: Wallet) => {
const L2BridgeInterface = getContractInterface('L2StandardBridge') const L2BridgeInterface = getContractInterface('L2StandardBridge')
const L2StandardBridge = new Contract( return new Contract(predeploys.L2StandardBridge, L2BridgeInterface, wallet)
predeploys.L2StandardBridge,
L2BridgeInterface,
wallet
)
return L2StandardBridge
} }
export const getOvmEth = (wallet: Wallet) => { export const getOvmEth = (wallet: Wallet) => {
const OVM_ETH = new Contract( return new Contract(OVM_ETH_ADDRESS, getContractInterface('OVM_ETH'), wallet)
OVM_ETH_ADDRESS,
getContractInterface('OVM_ETH'),
wallet
)
return OVM_ETH
} }
export const fundUser = async ( export const fundUser = async (
...@@ -152,12 +179,54 @@ export const fundUser = async ( ...@@ -152,12 +179,54 @@ export const fundUser = async (
) => { ) => {
const value = BigNumber.from(amount) const value = BigNumber.from(amount)
const tx = recipient const tx = recipient
? bridge.depositETHTo(recipient, 1_300_000, '0x', { value }) ? bridge.depositETHTo(recipient, DEFAULT_TEST_GAS_L2, '0x', {
: bridge.depositETH(1_300_000, '0x', { value }) value,
gasLimit: DEFAULT_TEST_GAS_L1,
})
: bridge.depositETH(DEFAULT_TEST_GAS_L2, '0x', {
value,
gasLimit: DEFAULT_TEST_GAS_L1,
})
await waitForXDomainTransaction(watcher, tx, Direction.L1ToL2) await waitForXDomainTransaction(watcher, tx, Direction.L1ToL2)
} }
export const conditionalTest = (
condition: (env?: OptimismEnv) => Promise<boolean>,
name,
fn,
message?: string,
timeout?: number
) => {
it(name, async function () {
const shouldRun = await condition()
if (!shouldRun) {
console.log(message)
this.skip()
return
}
await fn()
}).timeout(timeout || envConfig.MOCHA_TIMEOUT)
}
export const withdrawalTest = (name, fn, timeout?: number) =>
conditionalTest(
() => Promise.resolve(procEnv.RUN_WITHDRAWAL_TESTS),
name,
fn,
`Skipping withdrawal test.`,
timeout
)
export const hardhatTest = (name, fn) =>
conditionalTest(
isHardhat,
name,
fn,
'Skipping test on non-Hardhat environment.'
)
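The conditionalTest, withdrawalTest, and hardhatTest helpers above replace the old isLiveNetwork() early-return pattern with Mocha-level skips. A short usage sketch with hypothetical test names, showing how a spec file is expected to call them:

import { hardhatTest, withdrawalTest } from './shared/utils'

describe('conditional test examples (hypothetical)', () => {
  hardhatTest('runs only against a local Hardhat L1', async () => {
    // Body executes only when the L1 chain id is 31337; otherwise the test is skipped.
  })

  withdrawalTest(
    'runs only when RUN_WITHDRAWAL_TESTS is enabled',
    async () => {
      // Slow L2 -> L1 withdrawal flow goes here.
    },
    300_000 // optional per-test timeout, defaults to MOCHA_TIMEOUT
  )
})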
export const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms)) export const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms))
const abiCoder = new utils.AbiCoder() const abiCoder = new utils.AbiCoder()
...@@ -175,56 +244,23 @@ export const defaultTransactionFactory = () => { ...@@ -175,56 +244,23 @@ export const defaultTransactionFactory = () => {
} }
} }
export const waitForL2Geth = async ( export const gasPriceForL2 = async () => {
provider: providers.JsonRpcProvider if (procEnv.L2_GAS_PRICE === ON_CHAIN_GAS_PRICE) {
): Promise<providers.JsonRpcProvider> => { return l2Wallet.getGasPrice()
let ready: boolean = false
while (!ready) {
try {
await provider.getNetwork()
ready = true
} catch (error) {
await sleep(1000)
}
}
return injectL2Context(provider)
}
// eslint-disable-next-line @typescript-eslint/no-shadow
export const gasPriceForL2 = async (env: OptimismEnv) => {
// The integration tests enforce fees on L2
// which run against hardhat on L1. Update if
// geth --dev is adopted for L1
const chainId = await env.l1Wallet.getChainId()
if ((await isMainnet(env)) || chainId === 31337) {
return env.l2Wallet.getGasPrice()
} }
if (isLiveNetwork()) { return utils.parseUnits(procEnv.L2_GAS_PRICE, 'wei')
return Promise.resolve(BigNumber.from(10000))
}
return Promise.resolve(BigNumber.from(0))
} }
// eslint-disable-next-line @typescript-eslint/no-shadow export const gasPriceForL1 = async () => {
export const gasPriceForL1 = async (env: OptimismEnv) => { if (procEnv.L1_GAS_PRICE === ON_CHAIN_GAS_PRICE) {
const chainId = await env.l1Wallet.getChainId() return l1Wallet.getGasPrice()
switch (chainId) {
case 1:
return env.l1Wallet.getGasPrice()
case 3:
case 42:
return utils.parseUnits('10', 'gwei')
case 5:
return utils.parseUnits('2', 'gwei')
default:
return BigNumber.from(0)
} }
return utils.parseUnits(procEnv.L1_GAS_PRICE, 'wei')
} }
// eslint-disable-next-line @typescript-eslint/no-shadow export const isHardhat = async () => {
export const isMainnet = async (env: OptimismEnv) => { const chainId = await l1Wallet.getChainId()
const chainId = await env.l1Wallet.getChainId() return chainId === HARDHAT_CHAIN_ID
return chainId === 1
} }
...@@ -4,7 +4,6 @@ import { ...@@ -4,7 +4,6 @@ import {
TransactionResponse, TransactionResponse,
} from '@ethersproject/providers' } from '@ethersproject/providers'
import { Watcher } from '@eth-optimism/core-utils' import { Watcher } from '@eth-optimism/core-utils'
import { Contract, Transaction } from 'ethers' import { Contract, Transaction } from 'ethers'
export const initWatcher = async ( export const initWatcher = async (
......
import { expect } from './shared/setup'
/* Imports: External */ /* Imports: External */
import { Contract, ContractFactory, Wallet, utils } from 'ethers' import { Contract, Wallet, utils } from 'ethers'
import { ethers } from 'hardhat'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { import {
executeL1ToL2TransactionsParallel, executeL1ToL2TransactionsParallel,
...@@ -14,15 +14,13 @@ import { ...@@ -14,15 +14,13 @@ import {
executeRepeatedL2Transactions, executeRepeatedL2Transactions,
fundRandomWallet, fundRandomWallet,
} from './shared/stress-test-helpers' } from './shared/stress-test-helpers'
/* Imports: Artifacts */ /* Imports: Artifacts */
import simpleStorageJson from '../artifacts/contracts/SimpleStorage.sol/SimpleStorage.json' import { envConfig, fundUser } from './shared/utils'
import { fundUser, isLiveNetwork, isMainnet } from './shared/utils'
// Need a big timeout to allow for all transactions to be processed. // Need a big timeout to allow for all transactions to be processed.
// For some reason I can't figure out how to set the timeout on a per-suite basis // For some reason I can't figure out how to set the timeout on a per-suite basis
// so I'm instead setting it for every test. // so I'm instead setting it for every test.
const STRESS_TEST_TIMEOUT = isLiveNetwork() ? 500_000 : 1_200_000 const STRESS_TEST_TIMEOUT = envConfig.MOCHA_TIMEOUT * 5
describe('stress tests', () => { describe('stress tests', () => {
const numTransactions = 3 const numTransactions = 3
...@@ -32,13 +30,14 @@ describe('stress tests', () => { ...@@ -32,13 +30,14 @@ describe('stress tests', () => {
const wallets: Wallet[] = [] const wallets: Wallet[] = []
  before(async function () {
    if (!envConfig.RUN_STRESS_TESTS) {
      console.log('Skipping stress tests.')
      this.skip()
      return
    }

    env = await OptimismEnv.new()
for (let i = 0; i < numTransactions; i++) { for (let i = 0; i < numTransactions; i++) {
wallets.push(Wallet.createRandom()) wallets.push(Wallet.createRandom())
} }
...@@ -60,14 +59,12 @@ describe('stress tests', () => { ...@@ -60,14 +59,12 @@ describe('stress tests', () => {
let L2SimpleStorage: Contract let L2SimpleStorage: Contract
let L1SimpleStorage: Contract let L1SimpleStorage: Contract
beforeEach(async () => { beforeEach(async () => {
      const factory__L1SimpleStorage = await ethers.getContractFactory(
        'SimpleStorage',
        env.l1Wallet
      )
      const factory__L2SimpleStorage = await ethers.getContractFactory(
        'SimpleStorage',
        env.l2Wallet
      )
L1SimpleStorage = await factory__L1SimpleStorage.deploy() L1SimpleStorage = await factory__L1SimpleStorage.deploy()
......
import { expect } from './shared/setup'
/* Imports: External */ /* Imports: External */
import { ContractFactory } from 'ethers' import { ContractFactory } from 'ethers'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { predeploys } from '@eth-optimism/contracts' import { predeploys } from '@eth-optimism/contracts'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env' import { OptimismEnv } from './shared/env'
import { l2Provider } from './shared/utils' import { l2Provider } from './shared/utils'
......
...@@ -17,15 +17,23 @@ ...@@ -17,15 +17,23 @@
package main package main
import ( import (
"bytes"
"crypto/sha256"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io"
"io/ioutil"
"net/http"
"os" "os"
"path/filepath" "path/filepath"
"regexp"
"runtime" "runtime"
"strconv" "strconv"
"sync/atomic" "sync/atomic"
"time" "time"
"github.com/ethereum-optimism/optimism/l2geth/common/hexutil"
"github.com/ethereum-optimism/optimism/l2geth/cmd/utils" "github.com/ethereum-optimism/optimism/l2geth/cmd/utils"
"github.com/ethereum-optimism/optimism/l2geth/common" "github.com/ethereum-optimism/optimism/l2geth/common"
"github.com/ethereum-optimism/optimism/l2geth/console" "github.com/ethereum-optimism/optimism/l2geth/console"
...@@ -45,7 +53,7 @@ var ( ...@@ -45,7 +53,7 @@ var (
Action: utils.MigrateFlags(initGenesis), Action: utils.MigrateFlags(initGenesis),
Name: "init", Name: "init",
Usage: "Bootstrap and initialize a new genesis block", Usage: "Bootstrap and initialize a new genesis block",
ArgsUsage: "<genesisPath>", ArgsUsage: "<genesisPathOrUrl> (<genesisHash>)",
Flags: []cli.Flag{ Flags: []cli.Flag{
utils.DataDirFlag, utils.DataDirFlag,
}, },
...@@ -55,7 +63,22 @@ The init command initializes a new genesis block and definition for the network. ...@@ -55,7 +63,22 @@ The init command initializes a new genesis block and definition for the network.
This is a destructive action and changes the network in which you will be This is a destructive action and changes the network in which you will be
participating. participating.
It expects the genesis file as argument.`, It expects either a path or an HTTP URL to the genesis file as an argument. If an
HTTP URL is specified for the genesis file, then a hex-encoded SHA256 hash of the
genesis file must be included as a second argument. The hash provided on the CLI
will be checked against the hash of the genesis file downloaded from the URL.`,
}
dumpChainCfgCommand = cli.Command{
Action: utils.MigrateFlags(dumpChainCfg),
Name: "dump-chain-cfg",
Usage: "Dumps the current chain config to standard out.",
Flags: []cli.Flag{
utils.DataDirFlag,
},
Category: "BLOCKCHAIN COMMANDS",
Description: `
This command dumps the currently configured chain state to standard output. It
will fail if there is no genesis block configured.`,
} }
importCommand = cli.Command{ importCommand = cli.Command{
Action: utils.MigrateFlags(importChain), Action: utils.MigrateFlags(importChain),
...@@ -194,15 +217,50 @@ Use "ethereum dump 0" to dump the genesis block.`, ...@@ -194,15 +217,50 @@ Use "ethereum dump 0" to dump the genesis block.`,
// the zero'd block (i.e. genesis) or will fail hard if it can't succeed. // the zero'd block (i.e. genesis) or will fail hard if it can't succeed.
func initGenesis(ctx *cli.Context) error { func initGenesis(ctx *cli.Context) error {
// Make sure we have a valid genesis JSON // Make sure we have a valid genesis JSON
genesisPath := ctx.Args().First() genesisPathOrURL := ctx.Args().First()
if len(genesisPath) == 0 { if len(genesisPathOrURL) == 0 {
utils.Fatalf("Must supply path to genesis JSON file") utils.Fatalf("Must supply path or URL to genesis JSON file")
} }
file, err := os.Open(genesisPath)
if err != nil { var file io.ReadCloser
utils.Fatalf("Failed to read genesis file: %v", err) if matched, _ := regexp.MatchString("^http(s)?://", genesisPathOrURL); matched {
genesisHashStr := ctx.Args().Get(1)
if genesisHashStr == "" {
utils.Fatalf("Must specify a genesis hash argument if the genesis path argument is an URL.")
}
genesisHashData, err := hexutil.Decode(genesisHashStr)
if err != nil {
utils.Fatalf("Error decoding genesis hash: %v", err)
}
log.Info("Fetching genesis file", "url", genesisPathOrURL)
genesisData, err := fetchGenesis(genesisPathOrURL)
if err != nil {
utils.Fatalf("Failed to fetch genesis file: %v", err)
}
hash := sha256.New()
hash.Write(genesisData)
actualHash := hash.Sum(nil)
if !bytes.Equal(actualHash, genesisHashData) {
utils.Fatalf(
"Genesis hashes do not match. Need: %s, got: %s",
genesisHashStr,
hexutil.Encode(actualHash),
)
}
file = ioutil.NopCloser(bytes.NewReader(genesisData))
} else {
var err error
file, err = os.Open(genesisPathOrURL)
if err != nil {
utils.Fatalf("Failed to read genesis file: %v", err)
}
defer file.Close()
} }
defer file.Close()
genesis := new(core.Genesis) genesis := new(core.Genesis)
if err := json.NewDecoder(file).Decode(genesis); err != nil { if err := json.NewDecoder(file).Decode(genesis); err != nil {
...@@ -227,6 +285,30 @@ func initGenesis(ctx *cli.Context) error { ...@@ -227,6 +285,30 @@ func initGenesis(ctx *cli.Context) error {
return nil return nil
} }
// dumpChainCfg dumps chain config to standard output.
func dumpChainCfg(ctx *cli.Context) error {
stack := makeFullNode(ctx)
defer stack.Close()
db, err := stack.OpenDatabase("chaindata", 0, 0, "")
if err != nil {
utils.Fatalf("Failed to open database: %v", err)
}
stored := rawdb.ReadCanonicalHash(db, 0)
var zeroHash common.Hash
if stored == zeroHash {
utils.Fatalf("No genesis block configured.")
}
chainCfg := rawdb.ReadChainConfig(db, stored)
out, err := json.MarshalIndent(chainCfg, "", " ")
if err != nil {
utils.Fatalf("Failed to marshal chain config: %v", out)
}
fmt.Println(string(out))
return nil
}
func importChain(ctx *cli.Context) error { func importChain(ctx *cli.Context) error {
if len(ctx.Args()) < 1 { if len(ctx.Args()) < 1 {
utils.Fatalf("This command requires an argument.") utils.Fatalf("This command requires an argument.")
...@@ -557,3 +639,15 @@ func hashish(x string) bool { ...@@ -557,3 +639,15 @@ func hashish(x string) bool {
_, err := strconv.Atoi(x) _, err := strconv.Atoi(x)
return err != nil return err != nil
} }
func fetchGenesis(url string) ([]byte, error) {
client := &http.Client{
Timeout: 10 * time.Second,
}
resp, err := client.Get(url)
if err != nil {
return nil, err
}
defer resp.Body.Close()
return ioutil.ReadAll(resp.Body)
}
package main
import (
"io"
"net/http"
"net/http/httptest"
"os"
"testing"
)
func TestChainInit(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(200)
f, err := os.Open("testdata/init.json")
if err != nil {
panic(err)
}
defer f.Close()
io.Copy(w, f)
}))
tests := []struct {
name string
url string
hash string
errorMsg string
}{
{
"no genesis hash specified",
server.URL,
"",
"Must specify a genesis hash argument if the genesis path argument is an URL",
},
{
"invalid genesis hash specified",
server.URL,
"not hex yo",
"Error decoding genesis hash",
},
{
"bad URL",
"https://honk",
"0x1234",
"Failed to fetch genesis file",
},
{
"mis-matched hashes",
server.URL,
"0x1234",
"Genesis hashes do not match",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
datadir := tmpdir(t)
geth := runGeth(t, "init", tt.url, tt.hash, "--datadir", datadir)
geth.ExpectRegexp(tt.errorMsg)
})
}
t.Run("URL and hash args OK", func(t *testing.T) {
datadir := tmpdir(t)
geth := runGeth(t, "init", server.URL, "0x1f0201852c30e203a701ac283aeafafaf55b2ad3ae2f4e8f15c61e761434fb62", "--datadir", datadir)
geth.ExpectExit()
geth = runGeth(t, "dump-chain-cfg", "--datadir", datadir)
geth.ExpectRegexp("\"muirGlacierBlock\": 500")
})
t.Run("file arg OK", func(t *testing.T) {
datadir := tmpdir(t)
geth := runGeth(t, "init", "testdata/init.json", "--datadir", datadir)
geth.ExpectExit()
geth = runGeth(t, "dump-chain-cfg", "--datadir", datadir)
geth.ExpectRegexp("\"muirGlacierBlock\": 500")
})
}
func TestDumpChainCfg(t *testing.T) {
datadir := tmpdir(t)
geth := runGeth(t, "init", "testdata/init.json", "--datadir", datadir)
geth.ExpectExit()
geth = runGeth(t, "dump-chain-cfg", "--datadir", datadir)
geth.Expect(`{
"chainId": 69,
"homesteadBlock": 0,
"eip150Block": 0,
"eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"eip155Block": 0,
"eip158Block": 0,
"byzantiumBlock": 0,
"constantinopleBlock": 0,
"petersburgBlock": 0,
"istanbulBlock": 0,
"muirGlacierBlock": 500,
"clique": {
"period": 0,
"epoch": 30000
}
}`)
}
...@@ -216,6 +216,7 @@ func init() { ...@@ -216,6 +216,7 @@ func init() {
app.Commands = []cli.Command{ app.Commands = []cli.Command{
// See chaincmd.go: // See chaincmd.go:
initCommand, initCommand,
dumpChainCfgCommand,
importCommand, importCommand,
exportCommand, exportCommand,
importPreimagesCommand, importPreimagesCommand,
......
{
"commit": "2625e9e101176bef7e6b3ddf951eb2de796a3f3d",
"config": {
"chainId": 69,
"homesteadBlock": 0,
"eip150Block": 0,
"eip155Block": 0,
"eip158Block": 0,
"byzantiumBlock": 0,
"constantinopleBlock": 0,
"petersburgBlock": 0,
"istanbulBlock": 0,
"muirGlacierBlock": 500,
"clique": {
"period": 0,
"epoch": 30000
}
},
"difficulty": "1",
"gasLimit": "15000000",
"extradata": "0x0000000000000000000000000000000000000000000000000000000000000000ca062b0fd91172d89bcd4bb084ac4e21972cc4670000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"alloc": {
"0x4200000000000000000000000000000000000000": {
"balance": "00",
"storage": {},
"code": "0x608060405234801561001057600080fd5b50600436106100365760003560e01c806382e3702d1461003b578063cafa81dc14610072575b600080fd5b61005e610049366004610112565b60006020819052908152604090205460ff1681565b604051901515815260200160405180910390f35b61008561008036600461015a565b610087565b005b6001600080833360405160200161009f929190610229565b604080518083037fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe001815291815281516020928301208352908201929092520160002080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff001691151591909117905550565b60006020828403121561012457600080fd5b5035919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b60006020828403121561016c57600080fd5b813567ffffffffffffffff8082111561018457600080fd5b818401915084601f83011261019857600080fd5b8135818111156101aa576101aa61012b565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f011681019083821181831017156101f0576101f061012b565b8160405282815287602084870101111561020957600080fd5b826020860160208301376000928101602001929092525095945050505050565b6000835160005b8181101561024a5760208187018101518583015201610230565b81811115610259576000828501525b5060609390931b7fffffffffffffffffffffffffffffffffffffffff00000000000000000000000016919092019081526014019291505056fea26469706673582212209ffc0b44ce8a27c46cae74a3b3b620a72f10aaea97ed37c15b5d36792abd2aa464736f6c63430008090033"
},
"0x4200000000000000000000000000000000000002": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
"code": "0x608060405234801561001057600080fd5b50600436106100725760003560e01c80639b19251a116100505780639b19251a146100e9578063b1540a011461011c578063bdc7b54f1461012f57600080fd5b806308fd63221461007757806313af40351461008c5780638da5cb5b1461009f575b600080fd5b61008a610085366004610614565b610137565b005b61008a61009a366004610650565b610271565b6000546100bf9073ffffffffffffffffffffffffffffffffffffffff1681565b60405173ffffffffffffffffffffffffffffffffffffffff90911681526020015b60405180910390f35b61010c6100f7366004610650565b60016020526000908152604090205460ff1681565b60405190151581526020016100e0565b61010c61012a366004610650565b61047c565b61008a6104cd565b60005473ffffffffffffffffffffffffffffffffffffffff1633146101e3576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603a60248201527f46756e6374696f6e2063616e206f6e6c792062652063616c6c6564206279207460448201527f6865206f776e6572206f66207468697320636f6e74726163742e00000000000060648201526084015b60405180910390fd5b73ffffffffffffffffffffffffffffffffffffffff821660008181526001602090815260409182902080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00168515159081179091558251938452908301527f8daaf060c3306c38e068a75c054bf96ecd85a3db1252712c4d93632744c42e0d910160405180910390a15050565b60005473ffffffffffffffffffffffffffffffffffffffff163314610318576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603a60248201527f46756e6374696f6e2063616e206f6e6c792062652063616c6c6564206279207460448201527f6865206f776e6572206f66207468697320636f6e74726163742e00000000000060648201526084016101da565b73ffffffffffffffffffffffffffffffffffffffff81166103e1576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152605160248201527f4f564d5f4465706c6f79657257686974656c6973743a2063616e206f6e6c792060448201527f62652064697361626c65642076696120656e61626c654172626974726172794360648201527f6f6e74726163744465706c6f796d656e74000000000000000000000000000000608482015260a4016101da565b6000546040805173ffffffffffffffffffffffffffffffffffffffff928316815291831660208301527fb532073b38c83145e3e5135377a08bf9aab55bc0fd7c1179cd4fb995d2a5159c910160405180910390a1600080547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff92909216919091179055565b6000805473ffffffffffffffffffffffffffffffffffffffff1615806104c7575073ffffffffffffffffffffffffffffffffffffffff821660009081526001602052604090205460ff165b92915050565b60005473ffffffffffffffffffffffffffffffffffffffff163314610574576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603a60248201527f46756e6374696f6e2063616e206f6e6c792062652063616c6c6564206279207460448201527f6865206f776e6572206f66207468697320636f6e74726163742e00000000000060648201526084016101da565b60005460405173ffffffffffffffffffffffffffffffffffffffff90911681527fc0e106cf568e50698fdbde1eff56f5a5c966cc7958e37e276918e9e4ccdf8cd49060200160405180910390a1600080547fffffffffffffffffffffffff0000000000000000000000000000000000000000169055565b803573ffffffffffffffffffffffffffffffffffffffff8116811461060f57600080fd5b919050565b6000806040838503121561062757600080fd5b610630836105eb565b91506020830135801515811461064557600080fd5b809150509250929050565b60006020828403121561066257600080fd5b61066b826105eb565b939250505056fea264697066735822122045a02b3906eca00a51b37c2965ab13be381f71f60af681951849865fb2daa75f64736f6c63430008090033"
},
"0x4200000000000000000000000000000000000007": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000004": "0x000000000000000000000000000000000000000000000000000000000000dead",
"0x0000000000000000000000000000000000000000000000000000000000000005": "0x000000000000000000000000ea63b76403cdaaee329f2d252e1dc1af44aaa515",
"0x0000000000000000000000000000000000000000000000000000000000000003": "0x00000000000000000000000000000000000000000000000000000000000186a0"
},
"code": "0x608060405234801561001057600080fd5b50600436106100885760003560e01c8063a71198691161005b578063a71198691461012a578063b1b1b2091461014a578063cbd4ece91461016d578063ecc704281461018057600080fd5b806321d800ec1461008d5780633dbb202b146100c55780636e296e45146100da57806382e3702d14610107575b600080fd5b6100b061009b366004610826565b60006020819052908152604090205460ff1681565b60405190151581526020015b60405180910390f35b6100d86100d3366004610942565b610197565b005b6100e26102e2565b60405173ffffffffffffffffffffffffffffffffffffffff90911681526020016100bc565b6100b0610115366004610826565b60026020526000908152604090205460ff1681565b6005546100e29073ffffffffffffffffffffffffffffffffffffffff1681565b6100b0610158366004610826565b60016020526000908152604090205460ff1681565b6100d861017b3660046109ad565b61038b565b61018960035481565b6040519081526020016100bc565b60006101a784338560035461078d565b80516020808301919091206000908152600290915260409081902080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00166001179055517fcafa81dc0000000000000000000000000000000000000000000000000000000081529091507342000000000000000000000000000000000000009063cafa81dc9061023c908490600401610a89565b600060405180830381600087803b15801561025657600080fd5b505af115801561026a573d6000803e3d6000fd5b505050508373ffffffffffffffffffffffffffffffffffffffff167fcb0f7ffd78f9aee47a248fae8db181db6eee833039123e026dcbff529522e52a3385600354866040516102bc9493929190610aa3565b60405180910390a26001600360008282546102d79190610aef565b909155505050505050565b60045460009073ffffffffffffffffffffffffffffffffffffffff1661dead141561036e576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601f60248201527f78446f6d61696e4d65737361676553656e646572206973206e6f74207365740060448201526064015b60405180910390fd5b5060045473ffffffffffffffffffffffffffffffffffffffff1690565b60055473ffffffffffffffffffffffffffffffffffffffff167fffffffffffffffffffffffffeeeeffffffffffffffffffffffffffffffffeeef330173ffffffffffffffffffffffffffffffffffffffff161461046a576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602760248201527f50726f7669646564206d65737361676520636f756c64206e6f7420626520766560448201527f7269666965642e000000000000000000000000000000000000000000000000006064820152608401610365565b60006104788585858561078d565b8051602080830191909120600081815260019092526040909120549192509060ff1615610527576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602b60248201527f50726f7669646564206d6573736167652068617320616c72656164792062656560448201527f6e2072656365697665642e0000000000000000000000000000000000000000006064820152608401610365565b73ffffffffffffffffffffffffffffffffffffffff8616734200000000000000000000000000000000000000141561059957600090815260016020819052604090912080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0016909117905550610787565b600480547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff878116919091179091556040516000918816906105f2908790610b2e565b6000604051808303816000865af19150503d806000811461062f576040519150601f19603f3d011682016040523d82523d6000602084013e610634565b606091505b5050600480547fffffffffffffffffffffffff00000000000000000000000000000000000000001661dead1790559050801515600114156106d557600082815260016020819052604080832080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00169092179091555183917f4641df4a962071e12719d8c8c8e5ac7fc4d97b927346a3d7a335b1f7517e133c91a2610701565b60405182907f99d0e048484baa1b1540b1367cb128ac
d7ab2946d1ed91ec10e3c85e4bf51b8f90600090a25b600083334360405160200161071893929190610b4a565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181529181528151602092830120600090815291829052902080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00166001179055505050505b50505050565b6060848484846040516024016107a69493929190610b9c565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181529190526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fcbd4ece9000000000000000000000000000000000000000000000000000000001790529050949350505050565b60006020828403121561083857600080fd5b5035919050565b803573ffffffffffffffffffffffffffffffffffffffff8116811461086357600080fd5b919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b600082601f8301126108a857600080fd5b813567ffffffffffffffff808211156108c3576108c3610868565b604051601f83017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f0116810190828211818310171561090957610909610868565b8160405283815286602085880101111561092257600080fd5b836020870160208301376000602085830101528094505050505092915050565b60008060006060848603121561095757600080fd5b6109608461083f565b9250602084013567ffffffffffffffff81111561097c57600080fd5b61098886828701610897565b925050604084013563ffffffff811681146109a257600080fd5b809150509250925092565b600080600080608085870312156109c357600080fd5b6109cc8561083f565b93506109da6020860161083f565b9250604085013567ffffffffffffffff8111156109f657600080fd5b610a0287828801610897565b949793965093946060013593505050565b60005b83811015610a2e578181015183820152602001610a16565b838111156107875750506000910152565b60008151808452610a57816020860160208601610a13565b601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169290920160200192915050565b602081526000610a9c6020830184610a3f565b9392505050565b73ffffffffffffffffffffffffffffffffffffffff85168152608060208201526000610ad26080830186610a3f565b905083604083015263ffffffff8316606083015295945050505050565b60008219821115610b29577f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b500190565b60008251610b40818460208701610a13565b9190910192915050565b60008451610b5c818460208901610a13565b60609490941b7fffffffffffffffffffffffffffffffffffffffff0000000000000000000000001691909301908152601481019190915260340192915050565b600073ffffffffffffffffffffffffffffffffffffffff808716835280861660208401525060806040830152610bd56080830185610a3f565b90508260608301529594505050505056fea26469706673582212204f4a72d27cfaca016d132f5b31a0bc1435985651da3646f381a8994056bdfaeb64736f6c63430008090033"
},
"0x420000000000000000000000000000000000000F": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x000000000000000000000000fed8c59c7a0b898a8ef3365889ffee87d6e1f8b4",
"0x0000000000000000000000000000000000000000000000000000000000000001": "0x0000000000000000000000000000000000000000000000000000000000000001",
"0x0000000000000000000000000000000000000000000000000000000000000002": "0x0000000000000000000000000000000000000000000000000000000000000001",
"0x0000000000000000000000000000000000000000000000000000000000000003": "0x0000000000000000000000000000000000000000000000000000000000000abe",
"0x0000000000000000000000000000000000000000000000000000000000000004": "0x000000000000000000000000000000000000000000000000000000000016e360",
"0x0000000000000000000000000000000000000000000000000000000000000005": "0x0000000000000000000000000000000000000000000000000000000000000006"
},
"code": "0x608060405234801561001057600080fd5b50600436106100f55760003560e01c80638c8885c811610097578063de26c4a111610066578063de26c4a1146101cc578063f2fde38b146101df578063f45e65d8146101f2578063fe173b97146101fb57600080fd5b80638c8885c81461016b5780638da5cb5b1461017e578063bede39b5146101a6578063bf1fe420146101b957600080fd5b806349948e0e116100d357806349948e0e14610134578063519b4bd3146101475780637046559714610150578063715018a61461016357600080fd5b80630c18c162146100fa578063313ce567146101165780633577afc51461011f575b600080fd5b61010360035481565b6040519081526020015b60405180910390f35b61010360055481565b61013261012d3660046108d0565b610204565b005b610103610142366004610918565b6102c6565b61010360025481565b61013261015e3660046108d0565b610322565b6101326103d8565b6101326101793660046108d0565b610465565b60005460405173ffffffffffffffffffffffffffffffffffffffff909116815260200161010d565b6101326101b43660046108d0565b61051b565b6101326101c73660046108d0565b6105d1565b6101036101da366004610918565b610687565b6101326101ed3660046109e7565b61072b565b61010360045481565b61010360015481565b60005473ffffffffffffffffffffffffffffffffffffffff16331461028a576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e657260448201526064015b60405180910390fd5b60038190556040518181527f32740b35c0ea213650f60d44366b4fb211c9033b50714e4a1d34e65d5beb9bb4906020015b60405180910390a150565b6000806102d283610687565b90506000600254826102e49190610a53565b90506000600554600a6102f79190610bb2565b90506000600454836103099190610a53565b905060006103178383610bbe565b979650505050505050565b60005473ffffffffffffffffffffffffffffffffffffffff1633146103a3576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610281565b60048190556040518181527f3336cd9708eaf2769a0f0dc0679f30e80f15dcd88d1921b5a16858e8b85c591a906020016102bb565b60005473ffffffffffffffffffffffffffffffffffffffff163314610459576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610281565b610463600061085b565b565b60005473ffffffffffffffffffffffffffffffffffffffff1633146104e6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610281565b60058190556040518181527fd68112a8707e326d08be3656b528c1bcc5bbbfc47f4177e2179b14d8640838c1906020016102bb565b60005473ffffffffffffffffffffffffffffffffffffffff16331461059c576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610281565b60028190556040518181527f351fb23757bb5ea0546c85b7996ddd7155f96b939ebaa5ff7bc49c75f27f2c44906020016102bb565b60005473ffffffffffffffffffffffffffffffffffffffff163314610652576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610281565b60018190556040518181527ffcdccc6074c6c42e4bd578aa9870c697dc976a270968452d2b8c8dc369fae396906020016102bb565b600080805b8351811015610704578381815181106106a7576106a7610bf9565b01602001517fff00000000000000000000000000000000000000000000000000000000000000166106e4576106dd600483610c28565b91506106f2565b6106ef60108361
0c28565b91505b806106fc81610c40565b91505061068c565b506000600354826107159190610c28565b905061072381610440610c28565b949350505050565b60005473ffffffffffffffffffffffffffffffffffffffff1633146107ac576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f776e61626c653a2063616c6c6572206973206e6f7420746865206f776e65726044820152606401610281565b73ffffffffffffffffffffffffffffffffffffffff811661084f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602660248201527f4f776e61626c653a206e6577206f776e657220697320746865207a65726f206160448201527f64647265737300000000000000000000000000000000000000000000000000006064820152608401610281565b6108588161085b565b50565b6000805473ffffffffffffffffffffffffffffffffffffffff8381167fffffffffffffffffffffffff0000000000000000000000000000000000000000831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b6000602082840312156108e257600080fd5b5035919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b60006020828403121561092a57600080fd5b813567ffffffffffffffff8082111561094257600080fd5b818401915084601f83011261095657600080fd5b813581811115610968576109686108e9565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f011681019083821181831017156109ae576109ae6108e9565b816040528281528760208487010111156109c757600080fd5b826020860160208301376000928101602001929092525095945050505050565b6000602082840312156109f957600080fd5b813573ffffffffffffffffffffffffffffffffffffffff81168114610a1d57600080fd5b9392505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000817fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0483118215151615610a8b57610a8b610a24565b500290565b600181815b80851115610ae957817fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff04821115610acf57610acf610a24565b80851615610adc57918102915b93841c9390800290610a95565b509250929050565b600082610b0057506001610bac565b81610b0d57506000610bac565b8160018114610b235760028114610b2d57610b49565b6001915050610bac565b60ff841115610b3e57610b3e610a24565b50506001821b610bac565b5060208310610133831016604e8410600b8410161715610b6c575081810a610bac565b610b768383610a90565b807fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff04821115610ba857610ba8610a24565b0290505b92915050565b6000610a1d8383610af1565b600082610bf4577f4e487b7100000000000000000000000000000000000000000000000000000000600052601260045260246000fd5b500490565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b60008219821115610c3b57610c3b610a24565b500190565b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff821415610c7257610c72610a24565b506001019056fea2646970667358221220b949ef5f9defd6c0aab6259672d00d239cb8854c9972ba1866af1c6ec6433d4c64736f6c63430008090033"
},
"0x4200000000000000000000000000000000000010": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000001": "0x000000000000000000000000e68a8b89c6fe738114dada4c47564dbc72338c26",
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x0000000000000000000000004200000000000000000000000000000000000007"
},
"code": "0x608060405234801561001057600080fd5b50600436106100675760003560e01c80633cb747bf116100505780633cb747bf146100ca578063662a633a146100ea578063a3a79548146100fd57600080fd5b806332b7006d1461006c57806336c717c114610081575b600080fd5b61007f61007a366004610d0f565b610110565b005b6001546100a19073ffffffffffffffffffffffffffffffffffffffff1681565b60405173ffffffffffffffffffffffffffffffffffffffff909116815260200160405180910390f35b6000546100a19073ffffffffffffffffffffffffffffffffffffffff1681565b61007f6100f8366004610d80565b610126565b61007f61010b366004610e18565b6106c1565b61011f853333878787876106d8565b5050505050565b60015473ffffffffffffffffffffffffffffffffffffffff1661015e60005473ffffffffffffffffffffffffffffffffffffffff1690565b73ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff161461021d576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602e60248201527f4f564d5f58434841494e3a206d657373656e67657220636f6e7472616374207560448201527f6e61757468656e7469636174656400000000000000000000000000000000000060648201526084015b60405180910390fd5b8073ffffffffffffffffffffffffffffffffffffffff1661025360005473ffffffffffffffffffffffffffffffffffffffff1690565b73ffffffffffffffffffffffffffffffffffffffff16636e296e456040518163ffffffff1660e01b815260040160206040518083038186803b15801561029857600080fd5b505afa1580156102ac573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906102d09190610e9b565b73ffffffffffffffffffffffffffffffffffffffff1614610373576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152603060248201527f4f564d5f58434841494e3a2077726f6e672073656e646572206f662063726f7360448201527f732d646f6d61696e206d657373616765000000000000000000000000000000006064820152608401610214565b61039d877f1d1d8b6300000000000000000000000000000000000000000000000000000000610a32565b801561045357508673ffffffffffffffffffffffffffffffffffffffff1663c01e1bd66040518163ffffffff1660e01b8152600401602060405180830381600087803b1580156103ec57600080fd5b505af1158015610400573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906104249190610e9b565b73ffffffffffffffffffffffffffffffffffffffff168873ffffffffffffffffffffffffffffffffffffffff16145b15610567576040517f40c10f1900000000000000000000000000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff8681166004830152602482018690528816906340c10f1990604401600060405180830381600087803b1580156104c857600080fd5b505af11580156104dc573d6000803e3d6000fd5b505050508573ffffffffffffffffffffffffffffffffffffffff168773ffffffffffffffffffffffffffffffffffffffff168973ffffffffffffffffffffffffffffffffffffffff167fb0444523268717a02698be47d0803aa7468c00acbed2f8bd93a0459cde61dd898888888860405161055a9493929190610f08565b60405180910390a46106b7565b600063a9f9e67560e01b8989888a89898960405160240161058e9796959493929190610f3e565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181529190526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fffffffff00000000000000000000000000000000000000000000000000000000909316929092179091526001549091506106339073ffffffffffffffffffffffffffffffffffffffff16600083610a57565b8673ffffffffffffffffffffffffffffffffffffffff168873ffffffffffffffffffffffffffffffffffffffff168a73ffffffffffffffffffffffffffffffffffffffff167f7ea89a4591614515571c2b51f5ea06494056f261c10ab1ed8c03c7590d87bce0898989896040516106ad9493929190610f08565b60405180910390a4505b5050505050505050565b6106d0863387878787876106d8565b505050505050565b6040517f9dc29fac0000000000000000000000
000000000000000000000000000000000081523360048201526024810185905273ffffffffffffffffffffffffffffffffffffffff881690639dc29fac90604401600060405180830381600087803b15801561074657600080fd5b505af115801561075a573d6000803e3d6000fd5b5050505060008773ffffffffffffffffffffffffffffffffffffffff1663c01e1bd66040518163ffffffff1660e01b8152600401602060405180830381600087803b1580156107a857600080fd5b505af11580156107bc573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906107e09190610e9b565b9050606073ffffffffffffffffffffffffffffffffffffffff891673deaddeaddeaddeaddeaddeaddeaddeaddead000014156108d5576040517f1532ec340000000000000000000000000000000000000000000000000000000090610851908a908a908a9089908990602401610f9b565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181529190526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fffffffff00000000000000000000000000000000000000000000000000000000909316929092179091529050610994565b6040517fa9f9e67500000000000000000000000000000000000000000000000000000000906109149084908c908c908c908c908b908b90602401610f3e565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181529190526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167fffffffff000000000000000000000000000000000000000000000000000000009093169290921790915290505b6001546109b89073ffffffffffffffffffffffffffffffffffffffff168683610a57565b3373ffffffffffffffffffffffffffffffffffffffff168973ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff167f73d170910aba9e6d50b102db522b1dbcd796216f5128b445aa2135272886497e8a8a89896040516106ad9493929190610f08565b6000610a3d83610ae8565b8015610a4e5750610a4e8383610b4c565b90505b92915050565b6000546040517f3dbb202b00000000000000000000000000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff90911690633dbb202b90610ab190869085908790600401611016565b600060405180830381600087803b158015610acb57600080fd5b505af1158015610adf573d6000803e3d6000fd5b50505050505050565b6000610b14827f01ffc9a700000000000000000000000000000000000000000000000000000000610b4c565b8015610a515750610b45827fffffffff00000000000000000000000000000000000000000000000000000000610b4c565b1592915050565b604080517fffffffff00000000000000000000000000000000000000000000000000000000831660248083019190915282518083039091018152604490910182526020810180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff167f01ffc9a7000000000000000000000000000000000000000000000000000000001790529051600091908290819073ffffffffffffffffffffffffffffffffffffffff87169061753090610c06908690611092565b6000604051808303818686fa925050503d8060008114610c42576040519150601f19603f3d011682016040523d82523d6000602084013e610c47565b606091505b5091509150602081511015610c625760009350505050610a51565b818015610c7e575080806020019051810190610c7e91906110ae565b9695505050505050565b73ffffffffffffffffffffffffffffffffffffffff81168114610caa57600080fd5b50565b803563ffffffff81168114610cc157600080fd5b919050565b60008083601f840112610cd857600080fd5b50813567ffffffffffffffff811115610cf057600080fd5b602083019150836020828501011115610d0857600080fd5b9250929050565b600080600080600060808688031215610d2757600080fd5b8535610d3281610c88565b945060208601359350610d4760408701610cad565b9250606086013567ffffffffffffffff811115610d6357600080fd5b610d6f88828901610cc6565b969995985093965092949392505050565b600080600080600080600060c0888a031215610d9b57600080fd5b8735610da681610c88565b96506020880135610db681610c88565b95506040880135610dc681610c88565b94506060880135610dd681610c88565b9350608088013
5925060a088013567ffffffffffffffff811115610df957600080fd5b610e058a828b01610cc6565b989b979a50959850939692959293505050565b60008060008060008060a08789031215610e3157600080fd5b8635610e3c81610c88565b95506020870135610e4c81610c88565b945060408701359350610e6160608801610cad565b9250608087013567ffffffffffffffff811115610e7d57600080fd5b610e8989828a01610cc6565b979a9699509497509295939492505050565b600060208284031215610ead57600080fd5b8151610eb881610c88565b9392505050565b8183528181602085013750600060208284010152600060207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f840116840101905092915050565b73ffffffffffffffffffffffffffffffffffffffff85168152836020820152606060408201526000610c7e606083018486610ebf565b600073ffffffffffffffffffffffffffffffffffffffff808a1683528089166020840152808816604084015280871660608401525084608083015260c060a0830152610f8e60c083018486610ebf565b9998505050505050505050565b600073ffffffffffffffffffffffffffffffffffffffff808816835280871660208401525084604083015260806060830152610fdb608083018486610ebf565b979650505050505050565b60005b83811015611001578181015183820152602001610fe9565b83811115611010576000848401525b50505050565b73ffffffffffffffffffffffffffffffffffffffff841681526060602082015260008351806060840152611051816080850160208801610fe6565b63ffffffff93909316604083015250601f919091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0160160800192915050565b600082516110a4818460208701610fe6565b9190910192915050565b6000602082840312156110c057600080fd5b81518015158114610eb857600080fdfea264697066735822122003f4a9fcb2a89d7ca3b6ba7d919977ee33454f3b42553a547cba18d3eb2313c064736f6c63430008090033"
},
"0x4200000000000000000000000000000000000011": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x00000000000000000000000019f62bb50ebeb60c940f2fa04e2b6399b133cc4b"
},
"code": "0x6080604052600436106100385760003560e01c80633ccfd60b14610044578063d3e5792b1461005b578063d4ff92181461008a57600080fd5b3661003f57005b600080fd5b34801561005057600080fd5b506100596100dc565b005b34801561006757600080fd5b5061007767d02ab486cedc000081565b6040519081526020015b60405180910390f35b34801561009657600080fd5b506000546100b79073ffffffffffffffffffffffffffffffffffffffff1681565b60405173ffffffffffffffffffffffffffffffffffffffff9091168152602001610081565b67d02ab486cedc000047101561019e576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152605760248201527f4f564d5f53657175656e6365724665655661756c743a2077697468647261776160448201527f6c20616d6f756e74206d7573742062652067726561746572207468616e206d6960648201527f6e696d756d207769746864726177616c20616d6f756e74000000000000000000608482015260a40160405180910390fd5b600080546040805160208101825283815290517fa3a795480000000000000000000000000000000000000000000000000000000081527342000000000000000000000000000000000000109363a3a79548936102309373deaddeaddeaddeaddeaddeaddeaddeaddead00009373ffffffffffffffffffffffffffffffffffffffff909216924792909190600401610264565b600060405180830381600087803b15801561024a57600080fd5b505af115801561025e573d6000803e3d6000fd5b50505050565b600073ffffffffffffffffffffffffffffffffffffffff808816835260208188168185015286604085015263ffffffff8616606085015260a06080850152845191508160a085015260005b828110156102cb5785810182015185820160c0015281016102af565b828111156102dd57600060c084870101525b5050601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169190910160c001969550505050505056fea26469706673582212205c4e15d856927730194db4563b3226b44490fc00275320221c4e0c4d3292b66564736f6c63430008090033"
},
"0x4200000000000000000000000000000000000012": {
"balance": "00",
"storage": {},
"code": "0x608060405234801561001057600080fd5b506004361061002b5760003560e01c8063896f93d114610030575b600080fd5b61004361003e36600461025f565b610045565b005b73ffffffffffffffffffffffffffffffffffffffff83166100c6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f4d7573742070726f76696465204c3120746f6b656e2061646472657373000000604482015260640160405180910390fd5b60007342000000000000000000000000000000000000108484846040516100ec90610178565b6100f99493929190610359565b604051809103906000f080158015610115573d6000803e3d6000fd5b5090508073ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167fceeb8e7d520d7f3b65fc11a262b91066940193b05d4f93df07cfdced0eb551cf60405160405180910390a350505050565b6113d7806103b083390190565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b600082601f8301126101c557600080fd5b813567ffffffffffffffff808211156101e0576101e0610185565b604051601f83017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f0116810190828211818310171561022657610226610185565b8160405283815286602085880101111561023f57600080fd5b836020870160208301376000602085830101528094505050505092915050565b60008060006060848603121561027457600080fd5b833573ffffffffffffffffffffffffffffffffffffffff8116811461029857600080fd5b9250602084013567ffffffffffffffff808211156102b557600080fd5b6102c1878388016101b4565b935060408601359150808211156102d757600080fd5b506102e4868287016101b4565b9150509250925092565b6000815180845260005b81811015610314576020818501810151868301820152016102f8565b81811115610326576000602083870101525b50601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169290920160200192915050565b600073ffffffffffffffffffffffffffffffffffffffff80871683528086166020840152506080604083015261039260808301856102ee565b82810360608401526103a481856102ee565b97965050505050505056fe60806040523480156200001157600080fd5b50604051620013d7380380620013d783398101604081905262000034916200022f565b8151829082906200004d9060039060208501906200009f565b508051620000639060049060208401906200009f565b5050600580546001600160a01b039586166001600160a01b031991821617909155600680549690951695169490941790925550620002fc915050565b828054620000ad90620002bf565b90600052602060002090601f016020900481019282620000d157600085556200011c565b82601f10620000ec57805160ff19168380011785556200011c565b828001600101855582156200011c579182015b828111156200011c578251825591602001919060010190620000ff565b506200012a9291506200012e565b5090565b5b808211156200012a57600081556001016200012f565b80516001600160a01b03811681146200015d57600080fd5b919050565b634e487b7160e01b600052604160045260246000fd5b600082601f8301126200018a57600080fd5b81516001600160401b0380821115620001a757620001a762000162565b604051601f8301601f19908116603f01168101908282118183101715620001d257620001d262000162565b81604052838152602092508683858801011115620001ef57600080fd5b600091505b83821015620002135785820183015181830184015290820190620001f4565b83821115620002255760008385830101525b9695505050505050565b600080600080608085870312156200024657600080fd5b620002518562000145565b9350620002616020860162000145565b60408601519093506001600160401b03808211156200027f57600080fd5b6200028d8883890162000178565b93506060870151915080821115620002a457600080fd5b50620002b38782880162000178565b91505092959194509250565b600181811c90821680620002d457607f821691505b60208210811415620002f657634e487b7160e01b600052602260045260246000fd5b50919050565b6110cb806200030c6000396000f3fe608060405234801561001057600080fd5b50600436106101005760003560e01c806370a0823111610097578063a9059c
bb11610066578063a9059cbb14610215578063ae1f6aaf14610228578063c01e1bd61461026d578063dd62ed3e1461028d57600080fd5b806370a08231146101b157806395d89b41146101e75780639dc29fac146101ef578063a457c2d71461020257600080fd5b806323b872dd116100d357806323b872dd14610167578063313ce5671461017a578063395093511461018957806340c10f191461019c57600080fd5b806301ffc9a71461010557806306fdde031461012d578063095ea7b31461014257806318160ddd14610155575b600080fd5b610118610113366004610e4a565b6102d3565b60405190151581526020015b60405180910390f35b610135610393565b6040516101249190610e93565b610118610150366004610f2f565b610425565b6002545b604051908152602001610124565b610118610175366004610f59565b61043b565b60405160128152602001610124565b610118610197366004610f2f565b61050c565b6101af6101aa366004610f2f565b610555565b005b6101596101bf366004610f95565b73ffffffffffffffffffffffffffffffffffffffff1660009081526020819052604090205490565b61013561061a565b6101af6101fd366004610f2f565b610629565b610118610210366004610f2f565b6106e2565b610118610223366004610f2f565b6107a0565b6006546102489073ffffffffffffffffffffffffffffffffffffffff1681565b60405173ffffffffffffffffffffffffffffffffffffffff9091168152602001610124565b6005546102489073ffffffffffffffffffffffffffffffffffffffff1681565b61015961029b366004610fb0565b73ffffffffffffffffffffffffffffffffffffffff918216600090815260016020908152604080832093909416825291909152205490565b60007f01ffc9a7a5cef8baa21ed3c5c0d7e23accb804b619e9333b597f47a0d84076e27f1d1d8b63000000000000000000000000000000000000000000000000000000007fffffffff0000000000000000000000000000000000000000000000000000000084167f01ffc9a700000000000000000000000000000000000000000000000000000000148061038b57507fffffffff00000000000000000000000000000000000000000000000000000000848116908216145b949350505050565b6060600380546103a290610fe3565b80601f01602080910402602001604051908101604052809291908181526020018280546103ce90610fe3565b801561041b5780601f106103f05761010080835404028352916020019161041b565b820191906000526020600020905b8154815290600101906020018083116103fe57829003601f168201915b5050505050905090565b60006104323384846107ad565b50600192915050565b600061044884848461092d565b73ffffffffffffffffffffffffffffffffffffffff84166000908152600160209081526040808320338452909152902054828110156104f45760405162461bcd60e51b815260206004820152602860248201527f45524332303a207472616e7366657220616d6f756e742065786365656473206160448201527f6c6c6f77616e636500000000000000000000000000000000000000000000000060648201526084015b60405180910390fd5b61050185338584036107ad565b506001949350505050565b33600081815260016020908152604080832073ffffffffffffffffffffffffffffffffffffffff871684529091528120549091610432918590610550908690611066565b6107ad565b60065473ffffffffffffffffffffffffffffffffffffffff1633146105bc5760405162461bcd60e51b815260206004820181905260248201527f4f6e6c79204c32204272696467652063616e206d696e7420616e64206275726e60448201526064016104eb565b6105c68282610b93565b8173ffffffffffffffffffffffffffffffffffffffff167f0f6798a560793a54c3bcfe86a93cde1e73087d944c0ea20544137d41213968858260405161060e91815260200190565b60405180910390a25050565b6060600480546103a290610fe3565b60065473ffffffffffffffffffffffffffffffffffffffff1633146106905760405162461bcd60e51b815260206004820181905260248201527f4f6e6c79204c32204272696467652063616e206d696e7420616e64206275726e60448201526064016104eb565b61069a8282610c99565b8173ffffffffffffffffffffffffffffffffffffffff167fcc16f5dbb4873280815c1ee09dbd06736cffcc184412cf7a71a0fdb75d397ca58260405161060e91815260200190565b33600090815260016020908152604080832073ffffffffffffffffffffffffffffffffffffffff86168452909152812054828110156107895760405162461
bcd60e51b815260206004820152602560248201527f45524332303a2064656372656173656420616c6c6f77616e63652062656c6f7760448201527f207a65726f00000000000000000000000000000000000000000000000000000060648201526084016104eb565b61079633858584036107ad565b5060019392505050565b600061043233848461092d565b73ffffffffffffffffffffffffffffffffffffffff83166108355760405162461bcd60e51b8152602060048201526024808201527f45524332303a20617070726f76652066726f6d20746865207a65726f2061646460448201527f726573730000000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff82166108be5760405162461bcd60e51b815260206004820152602260248201527f45524332303a20617070726f766520746f20746865207a65726f20616464726560448201527f737300000000000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff83811660008181526001602090815260408083209487168084529482529182902085905590518481527f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92591015b60405180910390a3505050565b73ffffffffffffffffffffffffffffffffffffffff83166109b65760405162461bcd60e51b815260206004820152602560248201527f45524332303a207472616e736665722066726f6d20746865207a65726f20616460448201527f647265737300000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff8216610a3f5760405162461bcd60e51b815260206004820152602360248201527f45524332303a207472616e7366657220746f20746865207a65726f206164647260448201527f657373000000000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff831660009081526020819052604090205481811015610adb5760405162461bcd60e51b815260206004820152602660248201527f45524332303a207472616e7366657220616d6f756e742065786365656473206260448201527f616c616e6365000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff808516600090815260208190526040808220858503905591851681529081208054849290610b1f908490611066565b925050819055508273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef84604051610b8591815260200190565b60405180910390a350505050565b73ffffffffffffffffffffffffffffffffffffffff8216610bf65760405162461bcd60e51b815260206004820152601f60248201527f45524332303a206d696e7420746f20746865207a65726f20616464726573730060448201526064016104eb565b8060026000828254610c089190611066565b909155505073ffffffffffffffffffffffffffffffffffffffff821660009081526020819052604081208054839290610c42908490611066565b909155505060405181815273ffffffffffffffffffffffffffffffffffffffff8316906000907fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9060200160405180910390a35050565b73ffffffffffffffffffffffffffffffffffffffff8216610d225760405162461bcd60e51b815260206004820152602160248201527f45524332303a206275726e2066726f6d20746865207a65726f2061646472657360448201527f730000000000000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff821660009081526020819052604090205481811015610dbe5760405162461bcd60e51b815260206004820152602260248201527f45524332303a206275726e20616d6f756e7420657863656564732062616c616e60448201527f636500000000000000000000000000000000000000000000000000000000000060648201526084016104eb565b73ffffffffffffffffffffffffffffffffffffffff83166000908152602081905260408120838303905560028054849290610dfa90849061107e565b909155505060405182815260009073ffffffffffffffffff
ffffffffffffffffffffff8516907fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef90602001610920565b600060208284031215610e5c57600080fd5b81357fffffffff0000000000000000000000000000000000000000000000000000000081168114610e8c57600080fd5b9392505050565b600060208083528351808285015260005b81811015610ec057858101830151858201604001528201610ea4565b81811115610ed2576000604083870101525b50601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016929092016040019392505050565b803573ffffffffffffffffffffffffffffffffffffffff81168114610f2a57600080fd5b919050565b60008060408385031215610f4257600080fd5b610f4b83610f06565b946020939093013593505050565b600080600060608486031215610f6e57600080fd5b610f7784610f06565b9250610f8560208501610f06565b9150604084013590509250925092565b600060208284031215610fa757600080fd5b610e8c82610f06565b60008060408385031215610fc357600080fd5b610fcc83610f06565b9150610fda60208401610f06565b90509250929050565b600181811c90821680610ff757607f821691505b60208210811415611031577f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b6000821982111561107957611079611037565b500190565b60008282101561109057611090611037565b50039056fea264697066735822122069a2d33039157f3f2f180571262ca2a5d0a3a24d33bf9448f3b7c2ce9ff757f964736f6c63430008090033a2646970667358221220d2e13f28319115807ec7308d1cd88642a8542d0b838e00b8769f8a85d696f26764736f6c63430008090033"
},
"0x4200000000000000000000000000000000000013": {
"balance": "00",
"storage": {},
"code": "0x4B60005260206000F3"
},
"0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000006": "0x0000000000000000000000004200000000000000000000000000000000000010",
"0x0000000000000000000000000000000000000000000000000000000000000005": "0x0000000000000000000000000000000000000000000000000000000000000000",
"0x0000000000000000000000000000000000000000000000000000000000000003": "0x457468657200000000000000000000000000000000000000000000000000000a",
"0x0000000000000000000000000000000000000000000000000000000000000004": "0x4554480000000000000000000000000000000000000000000000000000000006"
},
"code": "0x608060405234801561001057600080fd5b50600436106101005760003560e01c806370a0823111610097578063a9059cbb11610066578063a9059cbb14610215578063ae1f6aaf14610228578063c01e1bd61461026d578063dd62ed3e1461028d57600080fd5b806370a08231146101b157806395d89b41146101e75780639dc29fac146101ef578063a457c2d71461020257600080fd5b806323b872dd116100d357806323b872dd14610167578063313ce5671461017a578063395093511461018957806340c10f191461019c57600080fd5b806301ffc9a71461010557806306fdde031461012d578063095ea7b31461014257806318160ddd14610155575b600080fd5b610118610113366004610c6d565b6102d3565b60405190151581526020015b60405180910390f35b610135610393565b6040516101249190610cb6565b610118610150366004610d52565b610425565b6002545b604051908152602001610124565b610118610175366004610d7c565b6104db565b60405160128152602001610124565b610118610197366004610d52565b61058c565b6101af6101aa366004610d52565b61063d565b005b6101596101bf366004610db8565b73ffffffffffffffffffffffffffffffffffffffff1660009081526020819052604090205490565b61013561071c565b6101af6101fd366004610d52565b61072b565b610118610210366004610d52565b6107fe565b610118610223366004610d52565b6108af565b6006546102489073ffffffffffffffffffffffffffffffffffffffff1681565b60405173ffffffffffffffffffffffffffffffffffffffff9091168152602001610124565b6005546102489073ffffffffffffffffffffffffffffffffffffffff1681565b61015961029b366004610dd3565b73ffffffffffffffffffffffffffffffffffffffff918216600090815260016020908152604080832093909416825291909152205490565b60007f01ffc9a7a5cef8baa21ed3c5c0d7e23accb804b619e9333b597f47a0d84076e27f1d1d8b63000000000000000000000000000000000000000000000000000000007fffffffff0000000000000000000000000000000000000000000000000000000084167f01ffc9a700000000000000000000000000000000000000000000000000000000148061038b57507fffffffff00000000000000000000000000000000000000000000000000000000848116908216145b949350505050565b6060600380546103a290610e06565b80601f01602080910402602001604051908101604052809291908181526020018280546103ce90610e06565b801561041b5780601f106103f05761010080835404028352916020019161041b565b820191906000526020600020905b8154815290600101906020018083116103fe57829003601f168201915b5050505050905090565b6040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152604260248201527f4f564d5f4554483a20617070726f76652069732064697361626c65642070656e60448201527f64696e67206675727468657220636f6d6d756e6974792064697363757373696f60648201527f6e2e000000000000000000000000000000000000000000000000000000000000608482015260009060a4015b60405180910390fd5b6040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152604760248201527f4f564d5f4554483a207472616e7366657246726f6d2069732064697361626c6560448201527f642070656e64696e67206675727468657220636f6d6d756e697479206469736360648201527f757373696f6e2e00000000000000000000000000000000000000000000000000608482015260009060a4016104d2565b6040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152604c60248201527f4f564d5f4554483a20696e637265617365416c6c6f77616e636520697320646960448201527f7361626c65642070656e64696e67206675727468657220636f6d6d756e69747960648201527f2064697363757373696f6e2e0000000000000000000000000000000000000000608482015260009060a4016104d2565b60065473ffffffffffffffffffffffffffffffffffffffff1633146106be576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f6e6c79204c32204272696467652063616e206d696e7420616e64206275726e60448201526064016104d2565b6106c88282610960565b8173ffffffffffffffffffffffffffffffffffffffff167f0f6798a560793a54c3bcfe
86a93cde1e73087d944c0ea20544137d41213968858260405161071091815260200190565b60405180910390a25050565b6060600480546103a290610e06565b60065473ffffffffffffffffffffffffffffffffffffffff1633146107ac576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f4f6e6c79204c32204272696467652063616e206d696e7420616e64206275726e60448201526064016104d2565b6107b68282610a80565b8173ffffffffffffffffffffffffffffffffffffffff167fcc16f5dbb4873280815c1ee09dbd06736cffcc184412cf7a71a0fdb75d397ca58260405161071091815260200190565b6040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152604c60248201527f4f564d5f4554483a206465637265617365416c6c6f77616e636520697320646960448201527f7361626c65642070656e64696e67206675727468657220636f6d6d756e69747960648201527f2064697363757373696f6e2e0000000000000000000000000000000000000000608482015260009060a4016104d2565b6040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152604360248201527f4f564d5f4554483a207472616e736665722069732064697361626c656420706560448201527f6e64696e67206675727468657220636f6d6d756e69747920646973637573736960648201527f6f6e2e0000000000000000000000000000000000000000000000000000000000608482015260009060a4016104d2565b73ffffffffffffffffffffffffffffffffffffffff82166109dd576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601f60248201527f45524332303a206d696e7420746f20746865207a65726f20616464726573730060448201526064016104d2565b80600260008282546109ef9190610e89565b909155505073ffffffffffffffffffffffffffffffffffffffff821660009081526020819052604081208054839290610a29908490610e89565b909155505060405181815273ffffffffffffffffffffffffffffffffffffffff8316906000907fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9060200160405180910390a35050565b73ffffffffffffffffffffffffffffffffffffffff8216610b23576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602160248201527f45524332303a206275726e2066726f6d20746865207a65726f2061646472657360448201527f730000000000000000000000000000000000000000000000000000000000000060648201526084016104d2565b73ffffffffffffffffffffffffffffffffffffffff821660009081526020819052604090205481811015610bd9576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152602260248201527f45524332303a206275726e20616d6f756e7420657863656564732062616c616e60448201527f636500000000000000000000000000000000000000000000000000000000000060648201526084016104d2565b73ffffffffffffffffffffffffffffffffffffffff83166000908152602081905260408120838303905560028054849290610c15908490610ea1565b909155505060405182815260009073ffffffffffffffffffffffffffffffffffffffff8516907fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9060200160405180910390a3505050565b600060208284031215610c7f57600080fd5b81357fffffffff0000000000000000000000000000000000000000000000000000000081168114610caf57600080fd5b9392505050565b600060208083528351808285015260005b81811015610ce357858101830151858201604001528201610cc7565b81811115610cf5576000604083870101525b50601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016929092016040019392505050565b803573ffffffffffffffffffffffffffffffffffffffff81168114610d4d57600080fd5b919050565b60008060408385031215610d6557600080fd5b610d6e83610d29565b946020939093013593505050565b600080600060608486031215610d9157600080fd5b610d9a84610d29565b9250610da860208501610d29565b9150604084013590509250925092565b600060208284031215610dca57600080fd5b610caf82610d29565b6000806040838503121
5610de657600080fd5b610def83610d29565b9150610dfd60208401610d29565b90509250929050565b600181811c90821680610e1a57607f821691505b60208210811415610e54577f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b50919050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60008219821115610e9c57610e9c610e5a565b500190565b600082821015610eb357610eb3610e5a565b50039056fea2646970667358221220b71535a5111461b42945e5d842957b3a5926f7ed07d271872f6da21952b5f8b464736f6c63430008090033"
},
"0x4200000000000000000000000000000000000006": {
"balance": "00",
"storage": {
"0x0000000000000000000000000000000000000000000000000000000000000000": "0x577261707065642045746865720000000000000000000000000000000000001a",
"0x0000000000000000000000000000000000000000000000000000000000000001": "0x5745544800000000000000000000000000000000000000000000000000000008",
"0x0000000000000000000000000000000000000000000000000000000000000002": "0x0000000000000000000000000000000000000000000000000000000000000012"
},
"code": "0x6080604052600436106100bc5760003560e01c8063313ce56711610074578063a9059cbb1161004e578063a9059cbb146102cb578063d0e30db0146100bc578063dd62ed3e14610311576100bc565b8063313ce5671461024b57806370a082311461027657806395d89b41146102b6576100bc565b806318160ddd116100a557806318160ddd146101aa57806323b872dd146101d15780632e1a7d4d14610221576100bc565b806306fdde03146100c6578063095ea7b314610150575b6100c4610359565b005b3480156100d257600080fd5b506100db6103a8565b6040805160208082528351818301528351919283929083019185019080838360005b838110156101155781810151838201526020016100fd565b50505050905090810190601f1680156101425780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561015c57600080fd5b506101966004803603604081101561017357600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135169060200135610454565b604080519115158252519081900360200190f35b3480156101b657600080fd5b506101bf6104c7565b60408051918252519081900360200190f35b3480156101dd57600080fd5b50610196600480360360608110156101f457600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135811691602081013590911690604001356104cb565b34801561022d57600080fd5b506100c46004803603602081101561024457600080fd5b503561066b565b34801561025757600080fd5b50610260610700565b6040805160ff9092168252519081900360200190f35b34801561028257600080fd5b506101bf6004803603602081101561029957600080fd5b503573ffffffffffffffffffffffffffffffffffffffff16610709565b3480156102c257600080fd5b506100db61071b565b3480156102d757600080fd5b50610196600480360360408110156102ee57600080fd5b5073ffffffffffffffffffffffffffffffffffffffff8135169060200135610793565b34801561031d57600080fd5b506101bf6004803603604081101561033457600080fd5b5073ffffffffffffffffffffffffffffffffffffffff813581169160200135166107a7565b33600081815260036020908152604091829020805434908101909155825190815291517fe1fffcc4923d04b559f4d29a8bfc6cda04eb5b0d3c460751c2402c5c5cc9109c9281900390910190a2565b6000805460408051602060026001851615610100027fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0190941693909304601f8101849004840282018401909252818152929183018282801561044c5780601f106104215761010080835404028352916020019161044c565b820191906000526020600020905b81548152906001019060200180831161042f57829003601f168201915b505050505081565b33600081815260046020908152604080832073ffffffffffffffffffffffffffffffffffffffff8716808552908352818420869055815186815291519394909390927f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925928290030190a350600192915050565b4790565b73ffffffffffffffffffffffffffffffffffffffff83166000908152600360205260408120548211156104fd57600080fd5b73ffffffffffffffffffffffffffffffffffffffff84163314801590610573575073ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff14155b156105ed5773ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020548211156105b557600080fd5b73ffffffffffffffffffffffffffffffffffffffff841660009081526004602090815260408083203384529091529020805483900390555b73ffffffffffffffffffffffffffffffffffffffff808516600081815260036020908152604080832080548890039055938716808352918490208054870190558351868152935191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef929081900390910190a35060019392505050565b3360009081526003602052604090205481111561068757600080fd5b33600081815260036020526040808220805485900390555183156108fc0291849190818181858888f193505050501580156106c6573d6000803e3d6000fd5b5060408051828152905133917f7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf
7268a95bf5081b65919081900360200190a250565b60025460ff1681565b60036020526000908152604090205481565b60018054604080516020600284861615610100027fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0190941693909304601f8101849004840282018401909252818152929183018282801561044c5780601f106104215761010080835404028352916020019161044c565b60006107a03384846104cb565b9392505050565b60046020908152600092835260408084209091529082529020548156fea265627a7a7231582091c18790e0cca5011d2518024840ee00fecc67e11f56fd746f2cf84d5b583e0064736f6c63430005110032"
}
}
}
\ No newline at end of file
...@@ -208,14 +208,13 @@ func (st *StateTransition) buyGas() error { ...@@ -208,14 +208,13 @@ func (st *StateTransition) buyGas() error {
func (st *StateTransition) preCheck() error { func (st *StateTransition) preCheck() error {
// Make sure this transaction's nonce is correct. // Make sure this transaction's nonce is correct.
if st.msg.CheckNonce() { if st.msg.CheckNonce() {
if rcfg.UsingOVM {
if st.msg.QueueOrigin() == types.QueueOriginL1ToL2 {
return st.buyGas()
}
}
nonce := st.state.GetNonce(st.msg.From()) nonce := st.state.GetNonce(st.msg.From())
if nonce < st.msg.Nonce() { if nonce < st.msg.Nonce() {
if rcfg.UsingOVM {
// The nonce never increments for L1ToL2 txs
if st.msg.QueueOrigin() == types.QueueOriginL1ToL2 {
return st.buyGas()
}
}
return ErrNonceTooHigh return ErrNonceTooHigh
} else if nonce > st.msg.Nonce() { } else if nonce > st.msg.Nonce() {
return ErrNonceTooLow return ErrNonceTooLow
......
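Read in split-diff form, the moved guard is easy to misparse, so here is a minimal, self-contained Go sketch of the resulting decision logic. The names `checkNonce`, `usingOVM`, and `queueOriginL1ToL2` are illustrative stand-ins rather than l2geth APIs; only the branching mirrors the hunk: an L1-to-L2 transaction may carry a nonce above the account nonce because such transactions never increment the sender's nonce.

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-ins for the l2geth errors; only the names echo the diff above.
var (
	ErrNonceTooHigh = errors.New("nonce too high")
	ErrNonceTooLow  = errors.New("nonce too low")
)

// queueOriginL1ToL2 is a hypothetical constant standing in for
// types.QueueOriginL1ToL2.
const queueOriginL1ToL2 = 1

// checkNonce mirrors the moved guard: the L1-to-L2 exemption now applies only
// when the transaction nonce is ahead of the account nonce, which is expected
// because L1-to-L2 transactions never increment the sender's nonce.
func checkNonce(usingOVM bool, queueOrigin int, accountNonce, txNonce uint64) error {
	if accountNonce < txNonce {
		if usingOVM && queueOrigin == queueOriginL1ToL2 {
			return nil // the real code proceeds straight to buyGas here
		}
		return ErrNonceTooHigh
	} else if accountNonce > txNonce {
		return ErrNonceTooLow
	}
	return nil
}

func main() {
	fmt.Println(checkNonce(true, queueOriginL1ToL2, 0, 5)) // <nil>: exempt L1-to-L2 tx
	fmt.Println(checkNonce(false, 0, 0, 5))                // nonce too high
}
```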
...@@ -50,7 +50,10 @@ import ( ...@@ -50,7 +50,10 @@ import (
"github.com/tyler-smith/go-bip39" "github.com/tyler-smith/go-bip39"
) )
var errOVMUnsupported = errors.New("OVM: Unsupported RPC Method") var (
errOVMUnsupported = errors.New("OVM: Unsupported RPC Method")
errNoSequencerURL = errors.New("sequencer transaction forwarding not configured")
)
const ( const (
// defaultDialTimeout is default duration the service will wait on // defaultDialTimeout is default duration the service will wait on
...@@ -1678,7 +1681,11 @@ func (s *PublicTransactionPoolAPI) SendRawTransaction(ctx context.Context, encod ...@@ -1678,7 +1681,11 @@ func (s *PublicTransactionPoolAPI) SendRawTransaction(ctx context.Context, encod
} }
if s.b.IsVerifier() { if s.b.IsVerifier() {
client, err := dialSequencerClientWithTimeout(ctx, s.b.SequencerClientHttp()) sequencerURL := s.b.SequencerClientHttp()
if sequencerURL == "" {
return common.Hash{}, errNoSequencerURL
}
client, err := dialSequencerClientWithTimeout(ctx, sequencerURL)
if err != nil { if err != nil {
return common.Hash{}, err return common.Hash{}, err
} }
......
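In plain terms, when running as a verifier or replica, `eth_sendRawTransaction` is proxied to the sequencer, and the new guard returns a descriptive error instead of dialing an empty URL. The sketch below is a stand-in, not the l2geth implementation; the error string is copied from the hunk, while `forwardToSequencer` and its `dial` callback are hypothetical.

```go
package main

import (
	"errors"
	"fmt"
)

// errNoSequencerURL reuses the message introduced in the hunk above.
var errNoSequencerURL = errors.New("sequencer transaction forwarding not configured")

// forwardToSequencer sketches the verifier path: fail fast with a clear error
// when no sequencer endpoint is configured, otherwise dial and forward.
func forwardToSequencer(sequencerURL string, dial func(url string) error) error {
	if sequencerURL == "" {
		return errNoSequencerURL
	}
	return dial(sequencerURL)
}

func main() {
	// Unconfigured verifier: surfaces the descriptive error immediately.
	fmt.Println(forwardToSequencer("", nil))

	// Configured verifier: the dial callback receives the URL.
	fmt.Println(forwardToSequencer("http://sequencer:8545", func(url string) error {
		fmt.Println("dialing", url)
		return nil
	}))
}
```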
...@@ -838,12 +838,13 @@ func (s *SyncService) applyTransactionToTip(tx *types.Transaction) error { ...@@ -838,12 +838,13 @@ func (s *SyncService) applyTransactionToTip(tx *types.Transaction) error {
// Set the L1 blocknumber // Set the L1 blocknumber
if l1BlockNumber == nil { if l1BlockNumber == nil {
tx.SetL1BlockNumber(bn) tx.SetL1BlockNumber(bn)
} else if l1BlockNumber.Uint64() > s.GetLatestL1BlockNumber() { } else if l1BlockNumber.Uint64() > bn {
s.SetLatestL1BlockNumber(l1BlockNumber.Uint64()) s.SetLatestL1BlockNumber(l1BlockNumber.Uint64())
} else { } else if l1BlockNumber.Uint64() < bn {
// l1BlockNumber < latest l1BlockNumber // l1BlockNumber < latest l1BlockNumber
// indicates an error // indicates an error
log.Error("Blocknumber monotonicity violation", "hash", tx.Hash().Hex()) log.Error("Blocknumber monotonicity violation", "hash", tx.Hash().Hex(),
"new", l1BlockNumber.Uint64(), "old", bn)
} }
// Store the latest timestamp value // Store the latest timestamp value
......
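The corrected bookkeeping, shown in isolation: the comparison now uses the locally read value `bn`, an equal block number is no longer logged as a violation, and the log line includes both the new and old values. Below is a hedged sketch with a plain pointer standing in for the SyncService state; it is not the actual l2geth method.

```go
package main

import "log"

// updateLatestL1BlockNumber mirrors the corrected branch: advance the cached
// value when the new number is higher, log a violation (with both values) only
// when it is strictly lower, and silently accept an equal number.
func updateLatestL1BlockNumber(latest *uint64, l1BlockNumber uint64, txHash string) {
	bn := *latest
	switch {
	case l1BlockNumber > bn:
		*latest = l1BlockNumber
	case l1BlockNumber < bn:
		// A decreasing L1 block number indicates an upstream error.
		log.Printf("Blocknumber monotonicity violation hash=%s new=%d old=%d",
			txHash, l1BlockNumber, bn)
	}
}

func main() {
	latest := uint64(100)
	updateLatestL1BlockNumber(&latest, 101, "0xabc") // advances to 101
	updateLatestL1BlockNumber(&latest, 101, "0xdef") // equal: no log, no change
	updateLatestL1BlockNumber(&latest, 99, "0x123")  // logs a violation
	log.Printf("latest=%d", latest)                  // latest=101
}
```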
...@@ -32,6 +32,10 @@ docker-compose \ ...@@ -32,6 +32,10 @@ docker-compose \
*Note*: This generates a large amount of log data which docker stores by default. See [Disk Usage](#disk-usage). *Note*: This generates a large amount of log data which docker stores by default. See [Disk Usage](#disk-usage).
Also note that Docker Desktop allocates only 2GB of memory by default, which is not enough to run the docker-compose services reliably.
To allocate more memory, open Settings > Resources in the Docker Desktop UI and adjust the memory slider (_4GB recommended_), then click Apply & Restart for the change to take effect.
To start the stack with monitoring enabled, just add the metric composition file. To start the stack with monitoring enabled, just add the metric composition file.
``` ```
docker-compose \ docker-compose \
......
...@@ -14,6 +14,7 @@ import { Gauge, Histogram, Counter } from 'prom-client' ...@@ -14,6 +14,7 @@ import { Gauge, Histogram, Counter } from 'prom-client'
import { RollupInfo, sleep } from '@eth-optimism/core-utils' import { RollupInfo, sleep } from '@eth-optimism/core-utils'
import { Logger, Metrics } from '@eth-optimism/common-ts' import { Logger, Metrics } from '@eth-optimism/common-ts'
import { getContractFactory } from 'old-contracts' import { getContractFactory } from 'old-contracts'
/* Internal Imports */ /* Internal Imports */
import { TxSubmissionHooks } from '..' import { TxSubmissionHooks } from '..'
......
...@@ -12,8 +12,8 @@ import { ...@@ -12,8 +12,8 @@ import {
import { Logger, Metrics } from '@eth-optimism/common-ts' import { Logger, Metrics } from '@eth-optimism/common-ts'
/* Internal Imports */ /* Internal Imports */
import { BlockRange, BatchSubmitter } from '.'
import { TransactionSubmitter } from '../utils' import { TransactionSubmitter } from '../utils'
import { BlockRange, BatchSubmitter } from '.'
export class StateBatchSubmitter extends BatchSubmitter { export class StateBatchSubmitter extends BatchSubmitter {
// TODO: Change this so that we calculate start = scc.totalElements() and end = ctc.totalElements()! // TODO: Change this so that we calculate start = scc.totalElements() and end = ctc.totalElements()!
......
...@@ -20,9 +20,8 @@ import { ...@@ -20,9 +20,8 @@ import {
BatchContext, BatchContext,
AppendSequencerBatchParams, AppendSequencerBatchParams,
} from '../transaction-chain-contract' } from '../transaction-chain-contract'
import { BlockRange, BatchSubmitter } from '.'
import { TransactionSubmitter } from '../utils' import { TransactionSubmitter } from '../utils'
import { BlockRange, BatchSubmitter } from '.'
export interface AutoFixBatchOptions { export interface AutoFixBatchOptions {
fixDoublePlayedDeposits: boolean fixDoublePlayedDeposits: boolean
......
/* External Imports */ /* External Imports */
import { exit } from 'process'
import { injectL2Context, Bcfg } from '@eth-optimism/core-utils' import { injectL2Context, Bcfg } from '@eth-optimism/core-utils'
import * as Sentry from '@sentry/node' import * as Sentry from '@sentry/node'
import { Logger, Metrics, createMetricsServer } from '@eth-optimism/common-ts' import { Logger, Metrics, createMetricsServer } from '@eth-optimism/common-ts'
import { exit } from 'process'
import { Signer, Wallet } from 'ethers' import { Signer, Wallet } from 'ethers'
import { import {
StaticJsonRpcProvider, StaticJsonRpcProvider,
......
import { expect } from '../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import '@nomiclabs/hardhat-ethers' import '@nomiclabs/hardhat-ethers'
import { Signer, ContractFactory, Contract, BigNumber } from 'ethers' import { Signer, ContractFactory, Contract, BigNumber } from 'ethers'
import sinon from 'sinon' import sinon from 'sinon'
import scc from '@eth-optimism/contracts/artifacts/contracts/L1/rollup/StateCommitmentChain.sol/StateCommitmentChain.json' import scc from '@eth-optimism/contracts/artifacts/contracts/L1/rollup/StateCommitmentChain.sol/StateCommitmentChain.json'
import { getContractInterface } from '@eth-optimism/contracts' import { getContractInterface } from '@eth-optimism/contracts'
import { smockit, MockContract } from '@eth-optimism/smock' import { smockit, MockContract } from '@eth-optimism/smock'
import { getContractFactory } from 'old-contracts' import { getContractFactory } from 'old-contracts'
import { QueueOrigin, Batch, remove0x } from '@eth-optimism/core-utils'
import { Logger, Metrics } from '@eth-optimism/common-ts'
/* Internal Imports */ /* Internal Imports */
import { MockchainProvider } from './mockchain-provider' import { MockchainProvider } from './mockchain-provider'
import { import { expect } from '../setup'
makeAddressManager,
setProxyTarget,
FORCE_INCLUSION_PERIOD_SECONDS,
} from '../helpers'
import { import {
CanonicalTransactionChainContract, CanonicalTransactionChainContract,
TransactionBatchSubmitter as RealTransactionBatchSubmitter, TransactionBatchSubmitter as RealTransactionBatchSubmitter,
...@@ -28,9 +22,11 @@ import { ...@@ -28,9 +22,11 @@ import {
YnatmTransactionSubmitter, YnatmTransactionSubmitter,
ResubmissionConfig, ResubmissionConfig,
} from '../../src' } from '../../src'
import {
import { QueueOrigin, Batch, remove0x } from '@eth-optimism/core-utils' makeAddressManager,
import { Logger, Metrics } from '@eth-optimism/common-ts' setProxyTarget,
FORCE_INCLUSION_PERIOD_SECONDS,
} from '../helpers'
const EXAMPLE_STATE_ROOT = const EXAMPLE_STATE_ROOT =
'0x16b7f83f409c7195b1f4fde5652f1b54a4477eacb6db7927691becafba5f8801' '0x16b7f83f409c7195b1f4fde5652f1b54a4477eacb6db7927691becafba5f8801'
......
import { expect } from '../setup'
import { ethers, BigNumber, Signer } from 'ethers' import { ethers, BigNumber, Signer } from 'ethers'
import { submitTransactionWithYNATM } from '../../src/utils/tx-submission'
import { ResubmissionConfig } from '../../src'
import { import {
TransactionReceipt, TransactionReceipt,
TransactionResponse, TransactionResponse,
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
import { expect } from '../setup'
import { submitTransactionWithYNATM } from '../../src/utils/tx-submission'
import { ResubmissionConfig } from '../../src'
const nullFunction = () => undefined const nullFunction = () => undefined
const nullHooks = { const nullHooks = {
beforeSendTransaction: nullFunction, beforeSendTransaction: nullFunction,
......
import { Server } from 'net'
import prometheus, { import prometheus, {
collectDefaultMetrics, collectDefaultMetrics,
DefaultMetricsCollectorConfiguration, DefaultMetricsCollectorConfiguration,
Registry, Registry,
} from 'prom-client' } from 'prom-client'
import express from 'express' import express from 'express'
import { Server } from 'net'
import { Logger } from './logger' import { Logger } from './logger'
......
/* External Imports */ /* External Imports */
import * as fs from 'fs' import * as fs from 'fs'
import * as path from 'path' import * as path from 'path'
import * as mkdirp from 'mkdirp' import * as mkdirp from 'mkdirp'
const ensure = (value, key) => { const ensure = (value, key) => {
......
/* eslint @typescript-eslint/no-var-requires: "off" */ /* eslint @typescript-eslint/no-var-requires: "off" */
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { DeployFunction } from 'hardhat-deploy/dist/types' import { DeployFunction } from 'hardhat-deploy/dist/types'
import { awaitCondition } from '@eth-optimism/core-utils'
import { import {
getContractFromArtifact, getContractFromArtifact,
fundAccount, fundAccount,
...@@ -8,7 +10,6 @@ import { ...@@ -8,7 +10,6 @@ import {
BIG_BALANCE, BIG_BALANCE,
} from '../src/deploy-utils' } from '../src/deploy-utils'
import { names } from '../src/address-names' import { names } from '../src/address-names'
import { awaitCondition } from '@eth-optimism/core-utils'
const deployFn: DeployFunction = async (hre) => { const deployFn: DeployFunction = async (hre) => {
if ((hre as any).deployConfig.forked !== 'true') { if ((hre as any).deployConfig.forked !== 'true') {
......
/* Imports: Internal */ /* Imports: Internal */
import { DeployFunction } from 'hardhat-deploy/dist/types'
import { names } from '../src/address-names' import { names } from '../src/address-names'
/* Imports: External */ /* Imports: External */
import { DeployFunction } from 'hardhat-deploy/dist/types'
const deployFn: DeployFunction = async (hre) => { const deployFn: DeployFunction = async (hre) => {
const { deploy } = hre.deployments const { deploy } = hre.deployments
......
import path from 'path' import path from 'path'
import glob from 'glob'
import fs from 'fs' import fs from 'fs'
import glob from 'glob'
/** /**
* Script for automatically generating a file which has a series of `require` statements for * Script for automatically generating a file which has a series of `require` statements for
* importing JSON contract artifacts. We do this to preserve browser compatibility. * importing JSON contract artifacts. We do this to preserve browser compatibility.
......
import path from 'path' import path from 'path'
import glob from 'glob'
import fs from 'fs' import fs from 'fs'
import glob from 'glob'
/** /**
* Script for automatically generating a TypeScript file for retrieving deploy artifact JSON files. * Script for automatically generating a TypeScript file for retrieving deploy artifact JSON files.
* We do this to make sure that this package remains browser compatible. * We do this to make sure that this package remains browser compatible.
......
import dirtree from 'directory-tree'
import fs from 'fs' import fs from 'fs'
import path from 'path' import path from 'path'
import dirtree from 'directory-tree'
import { predeploys } from '../src' import { predeploys } from '../src'
interface DeploymentInfo { interface DeploymentInfo {
......
/* External Imports */ /* External Imports */
import { promisify } from 'util'
import { exec } from 'child_process' import { exec } from 'child_process'
import { promisify } from 'util'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { import {
computeStorageSlots, computeStorageSlots,
......
import { createInterface } from 'readline' import { createInterface } from 'readline'
import { hexStringEquals } from '@eth-optimism/core-utils' import { hexStringEquals } from '@eth-optimism/core-utils'
export const getInput = (query) => { export const getInput = (query) => {
......
...@@ -4,9 +4,9 @@ import { ethers } from 'ethers' ...@@ -4,9 +4,9 @@ import { ethers } from 'ethers'
import { task } from 'hardhat/config' import { task } from 'hardhat/config'
import * as types from 'hardhat/internal/core/params/argumentTypes' import * as types from 'hardhat/internal/core/params/argumentTypes'
import { hexStringEquals } from '@eth-optimism/core-utils' import { hexStringEquals } from '@eth-optimism/core-utils'
import { getContractFactory, getContractDefinition } from '../src/contract-defs' import { getContractFactory, getContractDefinition } from '../src/contract-defs'
import { names } from '../src/address-names' import { names } from '../src/address-names'
import { import {
getInput, getInput,
color as c, color as c,
......
...@@ -3,8 +3,8 @@ ...@@ -3,8 +3,8 @@
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { task } from 'hardhat/config' import { task } from 'hardhat/config'
import * as types from 'hardhat/internal/core/params/argumentTypes' import * as types from 'hardhat/internal/core/params/argumentTypes'
import { getContractFactory, getContractDefinition } from '../src/contract-defs'
import { getContractFactory, getContractDefinition } from '../src/contract-defs'
import { import {
getInput, getInput,
color as c, color as c,
......
'use strict' 'use strict'
import fs from 'fs' import fs from 'fs'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { task } from 'hardhat/config' import { task } from 'hardhat/config'
import * as types from 'hardhat/internal/core/params/argumentTypes' import * as types from 'hardhat/internal/core/params/argumentTypes'
import { LedgerSigner } from '@ethersproject/hardware-wallets' import { LedgerSigner } from '@ethersproject/hardware-wallets'
import { getContractFactory } from '../src/contract-defs' import { getContractFactory } from '../src/contract-defs'
import { predeploys } from '../src/predeploys' import { predeploys } from '../src/predeploys'
......
...@@ -4,6 +4,7 @@ import { ethers } from 'ethers' ...@@ -4,6 +4,7 @@ import { ethers } from 'ethers'
import { task } from 'hardhat/config' import { task } from 'hardhat/config'
import * as types from 'hardhat/internal/core/params/argumentTypes' import * as types from 'hardhat/internal/core/params/argumentTypes'
import { LedgerSigner } from '@ethersproject/hardware-wallets' import { LedgerSigner } from '@ethersproject/hardware-wallets'
import { getContractFactory } from '../src/contract-defs' import { getContractFactory } from '../src/contract-defs'
import { predeploys } from '../src/predeploys' import { predeploys } from '../src/predeploys'
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract, BigNumber } from 'ethers' import { Signer, ContractFactory, Contract, BigNumber } from 'ethers'
...@@ -11,6 +9,7 @@ import { ...@@ -11,6 +9,7 @@ import {
} from '@eth-optimism/core-utils' } from '@eth-optimism/core-utils'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { import {
makeAddressManager, makeAddressManager,
setProxyTarget, setProxyTarget,
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract, constants } from 'ethers' import { Signer, ContractFactory, Contract, constants } from 'ethers'
...@@ -7,6 +5,7 @@ import { Interface } from 'ethers/lib/utils' ...@@ -7,6 +5,7 @@ import { Interface } from 'ethers/lib/utils'
import { smockit, MockContract, smoddit } from '@eth-optimism/smock' import { smockit, MockContract, smoddit } from '@eth-optimism/smock'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { NON_NULL_BYTES32, NON_ZERO_ADDRESS } from '../../../helpers' import { NON_NULL_BYTES32, NON_ZERO_ADDRESS } from '../../../helpers'
import { getContractInterface, predeploys } from '../../../../src' import { getContractInterface, predeploys } from '../../../../src'
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract } from 'ethers' import { Signer, ContractFactory, Contract } from 'ethers'
...@@ -13,6 +11,7 @@ import { keccak256 } from 'ethers/lib/utils' ...@@ -13,6 +11,7 @@ import { keccak256 } from 'ethers/lib/utils'
import _ from 'lodash' import _ from 'lodash'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { import {
makeAddressManager, makeAddressManager,
setProxyTarget, setProxyTarget,
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract, constants } from 'ethers' import { Signer, ContractFactory, Contract, constants } from 'ethers'
import { smockit, MockContract } from '@eth-optimism/smock' import { smockit, MockContract } from '@eth-optimism/smock'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { import {
makeAddressManager, makeAddressManager,
setProxyTarget, setProxyTarget,
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, Contract } from 'ethers' import { Signer, Contract } from 'ethers'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { makeAddressManager } from '../../../helpers' import { makeAddressManager } from '../../../helpers'
describe('BondManager', () => { describe('BondManager', () => {
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import hre, { ethers } from 'hardhat' import hre, { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract } from 'ethers' import { Signer, ContractFactory, Contract } from 'ethers'
...@@ -7,6 +5,7 @@ import { smockit, MockContract } from '@eth-optimism/smock' ...@@ -7,6 +5,7 @@ import { smockit, MockContract } from '@eth-optimism/smock'
import { applyL1ToL2Alias } from '@eth-optimism/core-utils' import { applyL1ToL2Alias } from '@eth-optimism/core-utils'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { import {
NON_NULL_BYTES32, NON_NULL_BYTES32,
NON_ZERO_ADDRESS, NON_ZERO_ADDRESS,
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract } from 'ethers' import { Signer, ContractFactory, Contract } from 'ethers'
...@@ -11,8 +9,8 @@ import { ...@@ -11,8 +9,8 @@ import {
} from '@eth-optimism/smock' } from '@eth-optimism/smock'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { NON_NULL_BYTES32, NON_ZERO_ADDRESS } from '../../../helpers' import { NON_NULL_BYTES32, NON_ZERO_ADDRESS } from '../../../helpers'
import { getContractInterface } from '../../../../src' import { getContractInterface } from '../../../../src'
const ERR_INVALID_MESSENGER = 'OVM_XCHAIN: messenger contract unauthenticated' const ERR_INVALID_MESSENGER = 'OVM_XCHAIN: messenger contract unauthenticated'
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Signer, ContractFactory, Contract } from 'ethers' import { Signer, ContractFactory, Contract } from 'ethers'
import { smoddit } from '@eth-optimism/smock' import { smoddit } from '@eth-optimism/smock'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { predeploys, getContractInterface } from '../../../../src' import { predeploys, getContractInterface } from '../../../../src'
describe('L2StandardTokenFactory', () => { describe('L2StandardTokenFactory', () => {
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { ContractFactory, Contract, Signer } from 'ethers' import { ContractFactory, Contract, Signer } from 'ethers'
import { expect } from '../../../setup'
describe('OVM_ETH', () => { describe('OVM_ETH', () => {
let signer1: Signer let signer1: Signer
let signer2: Signer let signer2: Signer
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { ContractFactory, Contract, Signer } from 'ethers' import { ContractFactory, Contract, Signer } from 'ethers'
import { calculateL1GasUsed, calculateL1Fee } from '@eth-optimism/core-utils' import { calculateL1GasUsed, calculateL1Fee } from '@eth-optimism/core-utils'
import { expect } from '../../../setup'
describe('OVM_GasPriceOracle', () => { describe('OVM_GasPriceOracle', () => {
const initialGasPrice = 0 const initialGasPrice = 0
let signer1: Signer let signer1: Signer
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { ContractFactory, Contract } from 'ethers' import { ContractFactory, Contract } from 'ethers'
...@@ -8,6 +6,7 @@ import { remove0x } from '@eth-optimism/core-utils' ...@@ -8,6 +6,7 @@ import { remove0x } from '@eth-optimism/core-utils'
import { keccak256 } from 'ethers/lib/utils' import { keccak256 } from 'ethers/lib/utils'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { NON_ZERO_ADDRESS } from '../../../helpers/constants' import { NON_ZERO_ADDRESS } from '../../../helpers/constants'
const ELEMENT_TEST_SIZES = [1, 2, 4, 8, 16] const ELEMENT_TEST_SIZES = [1, 2, 4, 8, 16]
......
import { expect } from '../../../setup'
/* Imports: External */ /* Imports: External */
import hre from 'hardhat' import hre from 'hardhat'
import { MockContract, smockit } from '@eth-optimism/smock' import { MockContract, smockit } from '@eth-optimism/smock'
import { Contract, Signer } from 'ethers' import { Contract, Signer } from 'ethers'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../../../setup'
import { predeploys } from '../../../../src' import { predeploys } from '../../../../src'
describe('OVM_SequencerFeeVault', () => { describe('OVM_SequencerFeeVault', () => {
......
import { expect } from '../../setup'
/* Imports: External */ /* Imports: External */
import hre from 'hardhat' import hre from 'hardhat'
import { Contract, Signer } from 'ethers' import { Contract, Signer } from 'ethers'
import { smockit } from '@eth-optimism/smock' import { smockit } from '@eth-optimism/smock'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../../setup'
import { getContractInterface } from '../../../src' import { getContractInterface } from '../../../src'
describe('L1ChugSplashProxy', () => { describe('L1ChugSplashProxy', () => {
......
import '../../../setup'
/* Internal Imports */ /* Internal Imports */
import '../../../setup'
import { Lib_OVMCodec_TEST_JSON } from '../../../data' import { Lib_OVMCodec_TEST_JSON } from '../../../data'
import { runJsonTest } from '../../../helpers' import { runJsonTest } from '../../../helpers'
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Contract } from 'ethers' import { Contract } from 'ethers'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { Lib_RLPWriter_TEST_JSON } from '../../../data' import { Lib_RLPWriter_TEST_JSON } from '../../../data'
const encode = async (Lib_RLPWriter: Contract, input: any): Promise<void> => { const encode = async (Lib_RLPWriter: Contract, input: any): Promise<void> => {
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Contract } from 'ethers' import { Contract } from 'ethers'
import { applyL1ToL2Alias, undoL1ToL2Alias } from '@eth-optimism/core-utils' import { applyL1ToL2Alias, undoL1ToL2Alias } from '@eth-optimism/core-utils'
import { expect } from '../../../setup'
describe('AddressAliasHelper', () => { describe('AddressAliasHelper', () => {
let AddressAliasHelper: Contract let AddressAliasHelper: Contract
before(async () => { before(async () => {
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import * as rlp from 'rlp' import * as rlp from 'rlp'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
...@@ -8,6 +6,7 @@ import { fromHexString, toHexString } from '@eth-optimism/core-utils' ...@@ -8,6 +6,7 @@ import { fromHexString, toHexString } from '@eth-optimism/core-utils'
import { Trie } from 'merkle-patricia-tree/dist/baseTrie' import { Trie } from 'merkle-patricia-tree/dist/baseTrie'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { TrieTestGenerator } from '../../../helpers' import { TrieTestGenerator } from '../../../helpers'
import * as officialTestJson from '../../../data/json/libraries/trie/trietest.json' import * as officialTestJson from '../../../data/json/libraries/trie/trietest.json'
import * as officialTestAnyOrderJson from '../../../data/json/libraries/trie/trieanyorder.json' import * as officialTestAnyOrderJson from '../../../data/json/libraries/trie/trieanyorder.json'
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Contract } from 'ethers' import { Contract } from 'ethers'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { TrieTestGenerator } from '../../../helpers' import { TrieTestGenerator } from '../../../helpers'
const NODE_COUNTS = [1, 2, 128] const NODE_COUNTS = [1, 2, 128]
......
import { expect } from '../../../setup'
import hre from 'hardhat' import hre from 'hardhat'
import { Contract, ethers } from 'ethers' import { Contract, ethers } from 'ethers'
import { expect } from '../../../setup'
describe('Lib_Buffer', () => { describe('Lib_Buffer', () => {
let Lib_Buffer: Contract let Lib_Buffer: Contract
beforeEach(async () => { beforeEach(async () => {
......
/* Internal Imports */ /* Internal Imports */
import { Lib_BytesUtils_TEST_JSON } from '../../../data'
import { runJsonTest } from '../../../helpers'
/* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Contract } from 'ethers' import { Contract } from 'ethers'
/* External Imports */
import { Lib_BytesUtils_TEST_JSON } from '../../../data'
import { runJsonTest } from '../../../helpers'
import { expect } from '../../../setup' import { expect } from '../../../setup'
describe('Lib_BytesUtils', () => { describe('Lib_BytesUtils', () => {
......
import { expect } from '../../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Contract, BigNumber } from 'ethers' import { Contract, BigNumber } from 'ethers'
...@@ -7,6 +5,7 @@ import { MerkleTree } from 'merkletreejs' ...@@ -7,6 +5,7 @@ import { MerkleTree } from 'merkletreejs'
import { fromHexString, toHexString } from '@eth-optimism/core-utils' import { fromHexString, toHexString } from '@eth-optimism/core-utils'
/* Internal Imports */ /* Internal Imports */
import { expect } from '../../../setup'
import { NON_NULL_BYTES32 } from '../../../helpers' import { NON_NULL_BYTES32 } from '../../../helpers'
const NODE_COUNTS = [ const NODE_COUNTS = [
......
import { NON_ZERO_ADDRESS } from '../constants'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { NON_ZERO_ADDRESS } from '../constants'
export const DUMMY_BATCH_HEADERS = [ export const DUMMY_BATCH_HEADERS = [
{ {
batchIndex: 0, batchIndex: 0,
......
import { expect } from '../../setup'
/* External Imports */ /* External Imports */
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { Contract, BigNumber } from 'ethers' import { Contract, BigNumber } from 'ethers'
import { expect } from '../../setup'
const bigNumberify = (arr: any[]) => { const bigNumberify = (arr: any[]) => {
return arr.map((el: any) => { return arr.map((el: any) => {
if (typeof el === 'number') { if (typeof el === 'number') {
......
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { bnToAddress } from './bn' import { bnToAddress } from './bn'
// Constant representing the alias to apply to the msg.sender when a contract sends an L1 => L2 // Constant representing the alias to apply to the msg.sender when a contract sends an L1 => L2
......
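The tests above exercise applyL1ToL2Alias and undoL1ToL2Alias, which shift an L1 contract address by a fixed offset before it appears as msg.sender on L2. A rough TypeScript sketch of that arithmetic, assuming the publicly documented offset constant rather than quoting the library's own implementation:

import { ethers } from 'ethers'

// Offset assumed from the publicly documented L1 => L2 aliasing scheme.
const L1_TO_L2_ALIAS_OFFSET = '0x1111000000000000000000000000000000001111'
const ADDRESS_MODULUS = ethers.BigNumber.from(2).pow(160)

const applyAliasSketch = (l1Address: string): string => {
  const aliased = ethers.BigNumber.from(l1Address)
    .add(L1_TO_L2_ALIAS_OFFSET)
    .mod(ADDRESS_MODULUS)
  // Re-pad to 20 bytes and checksum the result.
  return ethers.utils.getAddress(
    ethers.utils.hexZeroPad(aliased.toHexString(), 20)
  )
}

const undoAliasSketch = (l2Address: string): string => {
  const unaliased = ethers.BigNumber.from(l2Address)
    .add(ADDRESS_MODULUS)
    .sub(L1_TO_L2_ALIAS_OFFSET)
    .mod(ADDRESS_MODULUS)
  return ethers.utils.getAddress(
    ethers.utils.hexZeroPad(unaliased.toHexString(), 20)
  )
}

console.log(applyAliasSketch('0x' + '11'.repeat(20)))

The modulo keeps the result inside the 160-bit address space, so aliasing wraps around rather than overflowing.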
export interface Bcfg {
load: (options: { env?: boolean; argv?: boolean }) => void
str: (name: string, defaultValue?: string) => string
uint: (name: string, defaultValue?: number) => number
bool: (name: string, defaultValue?: boolean) => boolean
ufloat: (name: string, defaultValue?: number) => number
has: (name: string) => boolean
}
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { remove0x, add0x } from './common/hex-strings' import { remove0x, add0x } from './common/hex-strings'
/** /**
......
export * from './types'
export * from './sequencer-batch' export * from './sequencer-batch'
import { add0x, remove0x, encodeHex } from '../common'
import { BigNumber, ethers } from 'ethers' import { BigNumber, ethers } from 'ethers'
import { add0x, remove0x, encodeHex } from '../common'
export interface BatchContext { export interface BatchContext {
numSequencedTransactions: number numSequencedTransactions: number
numSubsequentQueueTransactions: number numSubsequentQueueTransactions: number
......
import { expect } from 'chai' import { expect } from 'chai'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { sleep } from './misc' import { sleep } from './misc'
interface deviationRanges { interface deviationRanges {
......
...@@ -3,6 +3,7 @@ ...@@ -3,6 +3,7 @@
*/ */
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { remove0x } from './common' import { remove0x } from './common'
const txDataZeroGas = 4 const txDataZeroGas = 4
......
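The fee helpers imported in the gas price oracle test (calculateL1GasUsed, calculateL1Fee) price calldata bytes, and the constant above prices a zero byte at 4 gas. A minimal sketch of the calldata portion of such an estimate, assuming the standard 16 gas per non-zero byte and ignoring any fixed overhead the real helpers add:

import { remove0x } from '@eth-optimism/core-utils'

// 4 comes from the txDataZeroGas constant above; 16 for non-zero bytes is the
// standard EVM calldata cost and is assumed here.
const txDataZeroGas = 4
const txDataNonZeroGas = 16

// Rough sketch of the calldata portion of an L1 gas estimate; not the
// library's calculateL1GasUsed.
const calldataGas = (data: string): number => {
  const bytes = Buffer.from(remove0x(data), 'hex')
  let gas = 0
  for (const byte of bytes) {
    gas += byte === 0 ? txDataZeroGas : txDataNonZeroGas
  }
  return gas
}

console.log(calldataGas('0x00ff00ff')) // 2 zero bytes + 2 non-zero bytes = 40 gas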
...@@ -2,8 +2,7 @@ export * from './coders' ...@@ -2,8 +2,7 @@ export * from './coders'
export * from './common' export * from './common'
export * from './watcher' export * from './watcher'
export * from './l2context' export * from './l2context'
export * from './batches' export * from './types'
export * from './bcfg'
export * from './fees' export * from './fees'
export * from './provider' export * from './provider'
export * from './alias' export * from './alias'
......
// Use this file for simple types that aren't necessarily associated with a specific project or
// package. Often used for alias types like Address = string.
export interface Signature { export interface Signature {
r: string r: string
s: string s: string
......
/**
* TypeScript typings for bcoin's BCFG config parser (https://github.com/bcoin-org/bcfg)
* This is NOT a complete set of typings, just what we use at Optimism at the moment.
* We could consider expanding this into a full set of typings in the future.
*/
export interface Bcfg {
/**
* Loads configuration values from the environment and/or process arguments. Must be called
* before config values can be accessed with other methods like str(...) or uint(...).
*
* @param options Options to use when loading arguments.
* @param options.env Boolean, whether or not to load from process.env.
* @param options.argv Boolean, whether or not to load from process.argv.
*/
load: (options: { env?: boolean; argv?: boolean }) => void
/**
* Returns the variable with the given name and casts it as a string. Queries from the
* environment or from argv depending on which were loaded when load() was called.
*
* @param name Name of the variable to query.
* @param defaultValue Optional default value if the variable does not exist.
* @returns Variable cast to a string.
*/
str: (name: string, defaultValue?: string) => string
/**
* Returns the variable with the given name and casts it as a uint. Will throw an error if the
* variable cannot be cast into a uint. Queries from the environment or from argv depending on
* which were loaded when load() was called.
*
* @param name Name of the variable to query.
* @param defaultValue Optional default value if the variable does not exist.
* @returns Variable cast to a uint.
*/
uint: (name: string, defaultValue?: number) => number
/**
* Returns the variable with the given name and casts it as a bool. Will throw an error if the
* variable cannot be cast into a bool. Queries from the environment or from argv depending on
* which were loaded when load() was called.
*
* @param name Name of the variable to query.
* @param defaultValue Optional default value if the variable does not exist.
* @returns Variable cast to a bool.
*/
bool: (name: string, defaultValue?: boolean) => boolean
/**
* Returns the variable with the given name and casts it as a ufloat. Will throw an error if the
* variable cannot be cast into a ufloat. Queries from the environment or from argv depending on
* which were loaded when load() was called.
*
* @param name Name of the variable to query.
* @param defaultValue Optional default value if the variable does not exist.
* @returns Variable cast to a ufloat.
*/
ufloat: (name: string, defaultValue?: number) => number
/**
* Checks if the given variable exists.
*
* @param name Name of the variable to query.
* @returns True if the variable exists, false otherwise.
*/
has: (name: string) => boolean
}
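A minimal usage sketch against the Bcfg interface above, assuming bcfg's default-exported Config class is constructed with a module name; the config keys and defaults below are purely illustrative:

import Config from 'bcfg'
import { Bcfg } from '@eth-optimism/core-utils'

// 'my-service' is an illustrative namespace for the config module.
const config: Bcfg = new Config('my-service')
config.load({ env: true, argv: true })

// Values fall back to the provided defaults when unset.
const l1RpcUrl = config.str('l1-rpc-url', 'http://localhost:8545')
const pollingInterval = config.uint('polling-interval', 5000)
const dryRun = config.bool('dry-run', false)

if (!config.has('l1-rpc-url')) {
  console.log('l1-rpc-url not set, falling back to default')
}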
// Optimism PBC 2021 // Types explicitly related to dealing with Geth.
// Represents the ethereum state /**
* Represents the Ethereum state, in the format that Geth expects.
*/
export interface State { export interface State {
[address: string]: { [address: string]: {
nonce: number nonce: number
...@@ -14,7 +16,9 @@ export interface State { ...@@ -14,7 +16,9 @@ export interface State {
} }
} }
// Represents a genesis file that geth can consume /**
* Represents Geth's genesis file format.
*/
export interface Genesis { export interface Genesis {
config: { config: {
chainId: number chainId: number
......
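The State and Genesis interfaces are collapsed above beyond their first fields. As a rough sketch, an allocation in that format might look like the following, where every field except nonce follows Geth's usual genesis layout and is an assumption here:

// Field names other than `nonce` are assumptions based on Geth's genesis
// allocation format; the address is reused from the surgery tests later on.
const state = {
  '0x66a84544bed4ca45b3c024776812abf87728fbaf': {
    nonce: 0,
    balance: '0xde0b6b3a7640000', // 1 ETH in wei, hex-encoded
    code: '0x',
    storage: {},
  },
}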
export * from './geth'
export * from './bcfg'
export * from './rollup'
export * from './basic'
...@@ -4,6 +4,9 @@ import { ...@@ -4,6 +4,9 @@ import {
TransactionResponse, TransactionResponse,
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
/**
* Structure of the response returned by L2Geth nodes when querying the `rollup_getInfo` endpoint.
*/
export interface RollupInfo { export interface RollupInfo {
mode: 'sequencer' | 'verifier' mode: 'sequencer' | 'verifier'
syncing: boolean syncing: boolean
...@@ -17,14 +20,18 @@ export interface RollupInfo { ...@@ -17,14 +20,18 @@ export interface RollupInfo {
} }
} }
/**
* Enum used for the two transaction types (queue and direct to Sequencer).
*/
export enum QueueOrigin { export enum QueueOrigin {
Sequencer = 'sequencer', Sequencer = 'sequencer',
L1ToL2 = 'l1', L1ToL2 = 'l1',
} }
/** /**
* Transaction & Blocks. These are the true data-types we expect * JSON transaction representation when returned by L2Geth nodes. This is simply an extension to
* from running a batch submitter. * the standard transaction response type. You do NOT need to use this type unless you care about
* having typed access to L2-specific fields.
*/ */
export interface L2Transaction extends TransactionResponse { export interface L2Transaction extends TransactionResponse {
l1BlockNumber: number l1BlockNumber: number
...@@ -33,21 +40,32 @@ export interface L2Transaction extends TransactionResponse { ...@@ -33,21 +40,32 @@ export interface L2Transaction extends TransactionResponse {
rawTransaction: string rawTransaction: string
} }
/**
* JSON block representation when returned by L2Geth nodes. Just a normal block but with
* L2Transaction objects instead of the standard transaction response object.
*/
export interface L2Block extends BlockWithTransactions { export interface L2Block extends BlockWithTransactions {
stateRoot: string stateRoot: string
transactions: [L2Transaction] transactions: [L2Transaction]
} }
/** /**
* BatchElement & Batch. These are the data-types of the compressed / batched * Generic batch element, either a state root batch element or a transaction batch element.
* block data we submit to L1.
*/ */
export interface BatchElement { export interface BatchElement {
// Only exists on state root batch elements.
stateRoot: string stateRoot: string
// Only exists on transaction batch elements.
isSequencerTx: boolean isSequencerTx: boolean
rawTransaction: undefined | string rawTransaction: undefined | string
// Batch element context, exists on all batch elements.
timestamp: number timestamp: number
blockNumber: number blockNumber: number
} }
/**
* List of batch elements.
*/
export type Batch = BatchElement[] export type Batch = BatchElement[]
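For concreteness, a transaction batch element with the field set documented above could be built like this (values are placeholders, and the type import is omitted because it depends on the consuming package):

// Values are placeholders; stateRoot is zeroed because, per the comments
// above, it is only meaningful on state root batch elements.
const element = {
  stateRoot: '0x' + '00'.repeat(32),
  isSequencerTx: true,
  rawTransaction: '0x' + 'aa'.repeat(65),
  timestamp: 1637000000,
  blockNumber: 1234,
}

// A Batch is just a list of such elements.
const batch = [element]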
import './setup' import './setup'
/* Internal Imports */ /* Internal Imports */
import { expect } from 'chai'
import { import {
encodeAppendSequencerBatch, encodeAppendSequencerBatch,
decodeAppendSequencerBatch, decodeAppendSequencerBatch,
sequencerBatch, sequencerBatch,
} from '../src' } from '../src'
import { expect } from 'chai'
describe('BatchEncoder', () => { describe('BatchEncoder', () => {
describe('appendSequencerBatch', () => { describe('appendSequencerBatch', () => {
......
import { expect } from './setup'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from './setup'
import { import {
toRpcHexString, toRpcHexString,
remove0x, remove0x,
......
import { expect } from './setup'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from './setup'
import { sleep } from '../src' import { sleep } from '../src'
describe('sleep', async () => { describe('sleep', async () => {
......
import { expect } from './setup' import { assert } from 'chai'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from './setup'
import { expectApprox, awaitCondition } from '../src' import { expectApprox, awaitCondition } from '../src'
import { assert } from 'chai'
describe('awaitCondition', () => { describe('awaitCondition', () => {
it('should try the condition fn until it returns true', async () => { it('should try the condition fn until it returns true', async () => {
......
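The new awaitCondition test above exercises a helper that retries a condition function until it returns true. The core-utils signature is not shown in this diff, so the following is a stand-alone sketch of the same retry pattern rather than the library implementation:

// Parameter names and defaults are assumptions, not the core-utils signature.
const waitForCondition = async (
  condition: () => Promise<boolean>,
  intervalMs = 100,
  attempts = 50
): Promise<void> => {
  for (let i = 0; i < attempts; i++) {
    if (await condition()) {
      return
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs))
  }
  throw new Error('condition never became true')
}

const main = async () => {
  let ready = false
  setTimeout(() => {
    ready = true
  }, 250)
  await waitForCondition(async () => ready)
  console.log('ready')
}
main()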
...@@ -3,6 +3,7 @@ import { LevelUp } from 'levelup' ...@@ -3,6 +3,7 @@ import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
/* Imports: Internal */ /* Imports: Internal */
import { SimpleDB } from './simple-db'
import { import {
EnqueueEntry, EnqueueEntry,
StateRootBatchEntry, StateRootBatchEntry,
...@@ -10,7 +11,6 @@ import { ...@@ -10,7 +11,6 @@ import {
TransactionBatchEntry, TransactionBatchEntry,
TransactionEntry, TransactionEntry,
} from '../types/database-types' } from '../types/database-types'
import { SimpleDB } from './simple-db'
const TRANSPORT_DB_KEYS = { const TRANSPORT_DB_KEYS = {
ENQUEUE: `enqueue`, ENQUEUE: `enqueue`,
......
...@@ -9,6 +9,7 @@ import { ...@@ -9,6 +9,7 @@ import {
import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain' import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain'
/* Imports: Internal */ /* Imports: Internal */
import { MissingElementError } from './errors'
import { import {
DecodedSequencerBatchTransaction, DecodedSequencerBatchTransaction,
SequencerBatchAppendedExtraData, SequencerBatchAppendedExtraData,
...@@ -18,7 +19,6 @@ import { ...@@ -18,7 +19,6 @@ import {
EventHandlerSet, EventHandlerSet,
} from '../../../types' } from '../../../types'
import { SEQUENCER_GAS_LIMIT, parseSignatureVParam } from '../../../utils' import { SEQUENCER_GAS_LIMIT, parseSignatureVParam } from '../../../utils'
import { MissingElementError } from './errors'
export const handleEventsSequencerBatchAppended: EventHandlerSet< export const handleEventsSequencerBatchAppended: EventHandlerSet<
SequencerBatchAppendedEvent, SequencerBatchAppendedEvent,
......
...@@ -4,6 +4,7 @@ import { getContractFactory } from '@eth-optimism/contracts' ...@@ -4,6 +4,7 @@ import { getContractFactory } from '@eth-optimism/contracts'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
/* Imports: Internal */ /* Imports: Internal */
import { MissingElementError } from './errors'
import { import {
StateRootBatchEntry, StateRootBatchEntry,
StateBatchAppendedExtraData, StateBatchAppendedExtraData,
...@@ -11,7 +12,6 @@ import { ...@@ -11,7 +12,6 @@ import {
StateRootEntry, StateRootEntry,
EventHandlerSet, EventHandlerSet,
} from '../../../types' } from '../../../types'
import { MissingElementError } from './errors'
export const handleEventsStateBatchAppended: EventHandlerSet< export const handleEventsStateBatchAppended: EventHandlerSet<
StateBatchAppendedEvent, StateBatchAppendedEvent,
......
...@@ -3,8 +3,8 @@ import { BigNumber } from 'ethers' ...@@ -3,8 +3,8 @@ import { BigNumber } from 'ethers'
import { TransactionEnqueuedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain' import { TransactionEnqueuedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain'
/* Imports: Internal */ /* Imports: Internal */
import { EnqueueEntry, EventHandlerSet } from '../../../types'
import { MissingElementError } from './errors' import { MissingElementError } from './errors'
import { EnqueueEntry, EventHandlerSet } from '../../../types'
export const handleEventsTransactionEnqueued: EventHandlerSet< export const handleEventsTransactionEnqueued: EventHandlerSet<
TransactionEnqueuedEvent, TransactionEnqueuedEvent,
......
...@@ -8,6 +8,10 @@ import { constants } from 'ethers' ...@@ -8,6 +8,10 @@ import { constants } from 'ethers'
import { Gauge, Counter } from 'prom-client' import { Gauge, Counter } from 'prom-client'
/* Imports: Internal */ /* Imports: Internal */
import { handleEventsTransactionEnqueued } from './handlers/transaction-enqueued'
import { handleEventsSequencerBatchAppended } from './handlers/sequencer-batch-appended'
import { handleEventsStateBatchAppended } from './handlers/state-batch-appended'
import { MissingElementError } from './handlers/errors'
import { TransportDB } from '../../db/transport-db' import { TransportDB } from '../../db/transport-db'
import { import {
OptimismContracts, OptimismContracts,
...@@ -17,11 +21,7 @@ import { ...@@ -17,11 +21,7 @@ import {
validators, validators,
} from '../../utils' } from '../../utils'
import { EventHandlerSet } from '../../types' import { EventHandlerSet } from '../../types'
import { handleEventsTransactionEnqueued } from './handlers/transaction-enqueued'
import { handleEventsSequencerBatchAppended } from './handlers/sequencer-batch-appended'
import { handleEventsStateBatchAppended } from './handlers/state-batch-appended'
import { L1DataTransportServiceOptions } from '../main/service' import { L1DataTransportServiceOptions } from '../main/service'
import { MissingElementError } from './handlers/errors'
interface L1IngestionMetrics { interface L1IngestionMetrics {
highestSyncedL1Block: Gauge<string> highestSyncedL1Block: Gauge<string>
......
...@@ -8,10 +8,10 @@ import bfj from 'bfj' ...@@ -8,10 +8,10 @@ import bfj from 'bfj'
import { Gauge } from 'prom-client' import { Gauge } from 'prom-client'
/* Imports: Internal */ /* Imports: Internal */
import { handleSequencerBlock } from './handlers/transaction'
import { TransportDB } from '../../db/transport-db' import { TransportDB } from '../../db/transport-db'
import { sleep, toRpcHexString, validators } from '../../utils' import { sleep, toRpcHexString, validators } from '../../utils'
import { L1DataTransportServiceOptions } from '../main/service' import { L1DataTransportServiceOptions } from '../main/service'
import { handleSequencerBlock } from './handlers/transaction'
interface L2IngestionMetrics { interface L2IngestionMetrics {
highestSyncedL2Block: Gauge<string> highestSyncedL2Block: Gauge<string>
......
...@@ -2,13 +2,13 @@ ...@@ -2,13 +2,13 @@
import { BaseService, Metrics } from '@eth-optimism/common-ts' import { BaseService, Metrics } from '@eth-optimism/common-ts'
import { LevelUp } from 'levelup' import { LevelUp } from 'levelup'
import level from 'level' import level from 'level'
import { Counter } from 'prom-client'
/* Imports: Internal */ /* Imports: Internal */
import { L1IngestionService } from '../l1-ingestion/service' import { L1IngestionService } from '../l1-ingestion/service'
import { L1TransportServer } from '../server/service' import { L1TransportServer } from '../server/service'
import { validators } from '../../utils' import { validators } from '../../utils'
import { L2IngestionService } from '../l2-ingestion/service' import { L2IngestionService } from '../l2-ingestion/service'
import { Counter } from 'prom-client'
export interface L1DataTransportServiceOptions { export interface L1DataTransportServiceOptions {
nodeEnv: string nodeEnv: string
......
...@@ -2,13 +2,13 @@ import { BaseProvider } from '@ethersproject/providers' ...@@ -2,13 +2,13 @@ import { BaseProvider } from '@ethersproject/providers'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { TypedEvent } from '@eth-optimism/contracts/dist/types/common' import { TypedEvent } from '@eth-optimism/contracts/dist/types/common'
import { TransportDB } from '../db/transport-db'
import { import {
TransactionBatchEntry, TransactionBatchEntry,
TransactionEntry, TransactionEntry,
StateRootBatchEntry, StateRootBatchEntry,
StateRootEntry, StateRootEntry,
} from './database-types' } from './database-types'
import { TransportDB } from '../db/transport-db'
export type GetExtraDataHandler<TEvent extends TypedEvent, TExtraData> = ( export type GetExtraDataHandler<TEvent extends TypedEvent, TExtraData> = (
event?: TEvent, event?: TEvent,
......
import { fromHexString } from '@eth-optimism/core-utils'
import * as url from 'url' import * as url from 'url'
import { fromHexString } from '@eth-optimism/core-utils'
export const validators = { export const validators = {
isBoolean: (val: any): boolean => { isBoolean: (val: any): boolean => {
return typeof val === 'boolean' return typeof val === 'boolean'
......
import { expect } from '../../../../setup'
/* Imports: External */ /* Imports: External */
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { Block } from '@ethersproject/abstract-provider' import { Block } from '@ethersproject/abstract-provider'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../../../../setup'
import { handleEventsStateBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/state-batch-appended' import { handleEventsStateBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/state-batch-appended'
import { StateBatchAppendedExtraData } from '../../../../../src/types' import { StateBatchAppendedExtraData } from '../../../../../src/types'
import { l1StateBatchData } from '../../../examples/l1-data' import { l1StateBatchData } from '../../../examples/l1-data'
......
import { expect } from '../../../../setup'
/* Imports: External */ /* Imports: External */
import { ethers, BigNumber } from 'ethers' import { ethers, BigNumber } from 'ethers'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../../../../setup'
import { handleEventsTransactionEnqueued } from '../../../../../src/services/l1-ingestion/handlers/transaction-enqueued' import { handleEventsTransactionEnqueued } from '../../../../../src/services/l1-ingestion/handlers/transaction-enqueued'
const MAX_ITERATIONS = 128 const MAX_ITERATIONS = 128
......
import { expect } from '../../../../setup'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../../../../setup'
import { l2Block } from '../../../examples/l2-data' import { l2Block } from '../../../examples/l2-data'
import { handleSequencerBlock } from '../../../../../src/services/l2-ingestion/handlers/transaction' import { handleSequencerBlock } from '../../../../../src/services/l2-ingestion/handlers/transaction'
......
import { Wallet, providers } from 'ethers' import { Wallet, providers } from 'ethers'
import { MessageRelayerService } from '../service'
import { Bcfg } from '@eth-optimism/core-utils' import { Bcfg } from '@eth-optimism/core-utils'
import { Logger, LoggerOptions } from '@eth-optimism/common-ts' import { Logger, LoggerOptions } from '@eth-optimism/common-ts'
import * as Sentry from '@sentry/node' import * as Sentry from '@sentry/node'
import * as dotenv from 'dotenv' import * as dotenv from 'dotenv'
import Config from 'bcfg' import Config from 'bcfg'
import { MessageRelayerService } from '../service'
dotenv.config() dotenv.config()
const main = async () => { const main = async () => {
......
...@@ -2,16 +2,15 @@ ...@@ -2,16 +2,15 @@
import { Contract, ethers, Wallet, BigNumber, providers } from 'ethers' import { Contract, ethers, Wallet, BigNumber, providers } from 'ethers'
import * as rlp from 'rlp' import * as rlp from 'rlp'
import { MerkleTree } from 'merkletreejs' import { MerkleTree } from 'merkletreejs'
/* Imports: Internal */
import { fromHexString, sleep } from '@eth-optimism/core-utils' import { fromHexString, sleep } from '@eth-optimism/core-utils'
import { Logger, BaseService, Metrics } from '@eth-optimism/common-ts' import { Logger, BaseService, Metrics } from '@eth-optimism/common-ts'
import { import {
loadContract, loadContract,
loadContractFromManager, loadContractFromManager,
predeploys, predeploys,
} from '@eth-optimism/contracts' } from '@eth-optimism/contracts'
/* Imports: Internal */
import { StateRootBatchHeader, SentMessage, SentMessageProof } from './types' import { StateRootBatchHeader, SentMessage, SentMessageProof } from './types'
interface MessageRelayerOptions { interface MessageRelayerOptions {
......
import { expect } from '../setup'
/* Imports: External */ /* Imports: External */
import hre from 'hardhat' import hre from 'hardhat'
import { Contract, Signer } from 'ethers' import { Contract, Signer } from 'ethers'
...@@ -8,6 +6,7 @@ import { smockit } from '@eth-optimism/smock' ...@@ -8,6 +6,7 @@ import { smockit } from '@eth-optimism/smock'
import { toPlainObject } from 'lodash' import { toPlainObject } from 'lodash'
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../setup'
import { import {
getMerkleTreeProof, getMerkleTreeProof,
getMessagesAndProofsForL2Transaction, getMessagesAndProofsForL2Transaction,
......
...@@ -62,7 +62,7 @@ ...@@ -62,7 +62,7 @@
"ethers": "^5.4.5", "ethers": "^5.4.5",
"lint-staged": "11.0.0", "lint-staged": "11.0.0",
"mocha": "^9.1.2", "mocha": "^9.1.2",
"node-fetch": "2.6.5", "node-fetch": "2.6.7",
"solc": "0.8.7-fixed", "solc": "0.8.7-fixed",
"ts-mocha": "^8.0.0", "ts-mocha": "^8.0.0",
"ts-node": "^10.0.0" "ts-node": "^10.0.0"
......
...@@ -6,6 +6,7 @@ import { ...@@ -6,6 +6,7 @@ import {
POOL_INIT_CODE_HASH_OPTIMISM_KOVAN, POOL_INIT_CODE_HASH_OPTIMISM_KOVAN,
} from '@uniswap/v3-sdk' } from '@uniswap/v3-sdk'
import { Token } from '@uniswap/sdk-core' import { Token } from '@uniswap/sdk-core'
import { UNISWAP_V3_FACTORY_ADDRESS } from './constants' import { UNISWAP_V3_FACTORY_ADDRESS } from './constants'
import { downloadAllSolcVersions } from './solc' import { downloadAllSolcVersions } from './solc'
import { import {
......
...@@ -5,6 +5,7 @@ import { ...@@ -5,6 +5,7 @@ import {
POOL_INIT_CODE_HASH_OPTIMISM_KOVAN, POOL_INIT_CODE_HASH_OPTIMISM_KOVAN,
} from '@uniswap/v3-sdk' } from '@uniswap/v3-sdk'
import { sleep, add0x, remove0x, clone } from '@eth-optimism/core-utils' import { sleep, add0x, remove0x, clone } from '@eth-optimism/core-utils'
import { import {
OLD_ETH_ADDRESS, OLD_ETH_ADDRESS,
WETH_TRANSFER_ADDRESSES, WETH_TRANSFER_ADDRESSES,
......
/* eslint @typescript-eslint/no-var-requires: "off" */ /* eslint @typescript-eslint/no-var-requires: "off" */
import fetch from 'node-fetch'
import path from 'path'
import fs from 'fs' import fs from 'fs'
import path from 'path'
import fetch from 'node-fetch'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { clone } from '@eth-optimism/core-utils' import { clone } from '@eth-optimism/core-utils'
import setupMethods from 'solc/wrapper' import setupMethods from 'solc/wrapper'
import { import {
COMPILER_VERSIONS_TO_SOLC, COMPILER_VERSIONS_TO_SOLC,
EMSCRIPTEN_BUILD_LIST, EMSCRIPTEN_BUILD_LIST,
......
import { ethers } from 'ethers'
import fs from 'fs' import fs from 'fs'
import { ethers } from 'ethers'
import { add0x, remove0x, clone } from '@eth-optimism/core-utils' import { add0x, remove0x, clone } from '@eth-optimism/core-utils'
import { StateDump, SurgeryDataSources, AccountType } from './types' import { StateDump, SurgeryDataSources, AccountType } from './types'
import { findAccount } from './utils' import { findAccount } from './utils'
import { handlers } from './handlers' import { handlers } from './handlers'
......
/* eslint @typescript-eslint/no-var-requires: "off" */ /* eslint @typescript-eslint/no-var-requires: "off" */
import { createReadStream } from 'fs'
import * as fs from 'fs'
import * as assert from 'assert'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { abi as UNISWAP_FACTORY_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Factory.sol/UniswapV3Factory.json' import { abi as UNISWAP_FACTORY_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Factory.sol/UniswapV3Factory.json'
import { Interface } from '@ethersproject/abi' import { Interface } from '@ethersproject/abi'
import { parseChunked } from '@discoveryjs/json-ext' import { parseChunked } from '@discoveryjs/json-ext'
import { createReadStream } from 'fs'
import * as fs from 'fs'
import byline from 'byline' import byline from 'byline'
import * as dotenv from 'dotenv' import * as dotenv from 'dotenv'
import * as assert from 'assert'
import { reqenv, getenv, remove0x } from '@eth-optimism/core-utils' import { reqenv, getenv, remove0x } from '@eth-optimism/core-utils'
import { import {
Account, Account,
EtherscanContract, EtherscanContract,
......
import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util' import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util'
import { add0x } from '@eth-optimism/core-utils' import { add0x } from '@eth-optimism/core-utils'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { expect, env } from './setup' import { expect, env } from './setup'
import { AccountType } from '../scripts/types' import { AccountType } from '../scripts/types'
......
import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util' import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util'
import { add0x } from '@eth-optimism/core-utils' import { add0x } from '@eth-optimism/core-utils'
import { expect, env } from './setup' import { expect, env } from './setup'
import { AccountType, Account } from '../scripts/types' import { AccountType, Account } from '../scripts/types'
......
import { expect } from '@eth-optimism/core-utils/test/setup' import { expect } from '@eth-optimism/core-utils/test/setup'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { env } from './setup' import { env } from './setup'
describe('erc20', () => { describe('erc20', () => {
......
import { ethers, BigNumber, Contract } from 'ethers' import { ethers, BigNumber, Contract } from 'ethers'
import { expect, env, ERC20_ABI } from './setup' import { expect, env, ERC20_ABI } from './setup'
import { AccountType } from '../scripts/types'
import { GenesisJsonProvider } from './provider' import { GenesisJsonProvider } from './provider'
import { AccountType } from '../scripts/types'
describe('predeploys', () => { describe('predeploys', () => {
const predeploys = { const predeploys = {
......
import { expect } from '@eth-optimism/core-utils/test/setup' import { expect } from '@eth-optimism/core-utils/test/setup'
import { ethers, BigNumber } from 'ethers' import { ethers, BigNumber } from 'ethers'
import { GenesisJsonProvider } from './provider'
import { Genesis } from '@eth-optimism/core-utils/src/types' import { Genesis } from '@eth-optimism/core-utils/src/types'
import { import {
remove0x, remove0x,
...@@ -8,6 +7,8 @@ import { ...@@ -8,6 +7,8 @@ import {
} from '@eth-optimism/core-utils/src/common/hex-strings' } from '@eth-optimism/core-utils/src/common/hex-strings'
import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util' import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util'
import { GenesisJsonProvider } from './provider'
const account = '0x66a84544bed4ca45b3c024776812abf87728fbaf' const account = '0x66a84544bed4ca45b3c024776812abf87728fbaf'
const genesis: Genesis = { const genesis: Genesis = {
......
import path from 'path'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { BigNumber } from '@ethersproject/bignumber' import { BigNumber } from '@ethersproject/bignumber'
import { Deferrable } from '@ethersproject/properties' import { Deferrable } from '@ethersproject/properties'
...@@ -16,8 +18,6 @@ import { ...@@ -16,8 +18,6 @@ import {
Listener, Listener,
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util' import { KECCAK256_RLP_S, KECCAK256_NULL_S } from 'ethereumjs-util'
import path from 'path'
import { bytes32ify, remove0x, add0x } from '@eth-optimism/core-utils' import { bytes32ify, remove0x, add0x } from '@eth-optimism/core-utils'
// Represents the ethereum state // Represents the ethereum state
......
...@@ -6,10 +6,11 @@ import * as dotenv from 'dotenv' ...@@ -6,10 +6,11 @@ import * as dotenv from 'dotenv'
import { getenv, remove0x } from '@eth-optimism/core-utils' import { getenv, remove0x } from '@eth-optimism/core-utils'
import { providers, BigNumber } from 'ethers' import { providers, BigNumber } from 'ethers'
import { solidity } from 'ethereum-waffle' import { solidity } from 'ethereum-waffle'
import { GenesisJsonProvider } from './provider'
import { SurgeryDataSources, Account, AccountType } from '../scripts/types' import { SurgeryDataSources, Account, AccountType } from '../scripts/types'
import { loadSurgeryData } from '../scripts/data' import { loadSurgeryData } from '../scripts/data'
import { classify, classifiers } from '../scripts/classifiers' import { classify, classifiers } from '../scripts/classifiers'
import { GenesisJsonProvider } from './provider'
// Chai plugins go here. // Chai plugins go here.
chai.use(chaiAsPromised) chai.use(chaiAsPromised)
......
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { abi as UNISWAP_POOL_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Pool.sol/UniswapV3Pool.json' import { abi as UNISWAP_POOL_ABI } from '@uniswap/v3-core/artifacts/contracts/UniswapV3Pool.sol/UniswapV3Pool.json'
import { expect, env, ERC20_ABI } from './setup'
import { UNISWAP_V3_NFPM_ADDRESS } from '../scripts/constants' import { UNISWAP_V3_NFPM_ADDRESS } from '../scripts/constants'
import { getUniswapV3Factory, replaceWETH } from '../scripts/utils' import { getUniswapV3Factory, replaceWETH } from '../scripts/utils'
import { expect, env, ERC20_ABI } from './setup'
import { AccountType } from '../scripts/types' import { AccountType } from '../scripts/types'
describe('uniswap contracts', () => { describe('uniswap contracts', () => {
......
import { expect } from '@eth-optimism/core-utils/test/setup'
import fs from 'fs/promises' import fs from 'fs/promises'
import path from 'path' import path from 'path'
import { expect } from '@eth-optimism/core-utils/test/setup'
import { isBytecodeERC20 } from '../scripts/utils' import { isBytecodeERC20 } from '../scripts/utils'
describe('Utils', () => { describe('Utils', () => {
......
import express from 'express'
import { Server } from 'net' import { Server } from 'net'
import express from 'express'
import promBundle from 'express-prom-bundle' import promBundle from 'express-prom-bundle'
import { Gauge, Histogram } from 'prom-client' import { Gauge, Histogram } from 'prom-client'
import cron from 'node-cron' import cron from 'node-cron'
......
...@@ -6,6 +6,7 @@ import { ...@@ -6,6 +6,7 @@ import {
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
import { ethers, BigNumber, Event } from 'ethers' import { ethers, BigNumber, Event } from 'ethers'
import { sleep } from '@eth-optimism/core-utils' import { sleep } from '@eth-optimism/core-utils'
import { import {
ICrossChainProvider, ICrossChainProvider,
OEContracts, OEContracts,
...@@ -420,9 +421,27 @@ export class CrossChainProvider implements ICrossChainProvider { ...@@ -420,9 +421,27 @@ export class CrossChainProvider implements ICrossChainProvider {
} }
public async estimateL2MessageGasLimit( public async estimateL2MessageGasLimit(
message: MessageLike message: MessageLike,
opts?: {
bufferPercent?: number
}
): Promise<BigNumber> { ): Promise<BigNumber> {
throw new Error('Not implemented') const resolved = await this.toCrossChainMessage(message)
// L2 message gas estimation is only used for L1 => L2 messages.
if (resolved.direction === MessageDirection.L2_TO_L1) {
throw new Error(`cannot estimate gas limit for L2 => L1 message`)
}
const estimate = await this.l2Provider.estimateGas({
from: resolved.sender,
to: resolved.target,
data: resolved.message,
})
// Return the estimate plus a buffer (20% by default) to absorb gas estimation error.
const bufferPercent = opts?.bufferPercent || 20
return estimate.mul(100 + bufferPercent).div(100)
} }
public async estimateMessageWaitTimeSeconds( public async estimateMessageWaitTimeSeconds(
......
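Putting the new estimateL2MessageGasLimit method together with the provider construction used in the accompanying tests, a usage sketch might look like this (the package import path and RPC URLs are assumptions, and the message fields are placeholders copied from the test fixtures):

import { ethers } from 'ethers'
// Import path assumed; the tests import these from the package source directly.
import { CrossChainProvider, MessageDirection } from '@eth-optimism/sdk'

const provider = new CrossChainProvider({
  l1Provider: new ethers.providers.JsonRpcProvider('http://localhost:8545'),
  l2Provider: new ethers.providers.JsonRpcProvider('http://localhost:9545'),
  l1ChainId: 31337,
})

const main = async () => {
  const gasLimit = await provider.estimateL2MessageGasLimit(
    {
      direction: MessageDirection.L1_TO_L2,
      target: '0x' + '11'.repeat(20),
      sender: '0x' + '22'.repeat(20),
      message: '0x' + '33'.repeat(64),
      messageNonce: 1234,
      logIndex: 0,
      blockNumber: 1234,
      transactionHash: '0x' + '44'.repeat(32),
    },
    { bufferPercent: 30 } // optional; defaults to a 20% buffer
  )
  console.log(gasLimit.toString())
}
main()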
...@@ -3,6 +3,7 @@ import { ...@@ -3,6 +3,7 @@ import {
TransactionRequest, TransactionRequest,
TransactionResponse, TransactionResponse,
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
import { NumberLike, L1ToL2Overrides } from './types' import { NumberLike, L1ToL2Overrides } from './types'
import { ICrossChainMessenger } from './cross-chain-messenger' import { ICrossChainMessenger } from './cross-chain-messenger'
......
...@@ -3,6 +3,7 @@ import { ...@@ -3,6 +3,7 @@ import {
TransactionRequest, TransactionRequest,
TransactionResponse, TransactionResponse,
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
import { import {
MessageLike, MessageLike,
NumberLike, NumberLike,
......
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { Provider, BlockTag } from '@ethersproject/abstract-provider' import { Provider, BlockTag } from '@ethersproject/abstract-provider'
import { import {
MessageLike, MessageLike,
TransactionLike, TransactionLike,
...@@ -200,9 +201,16 @@ export interface ICrossChainProvider { ...@@ -200,9 +201,16 @@ export interface ICrossChainProvider {
* L1 => L2 messages. You would supply this gas limit when sending the message to L2. * L1 => L2 messages. You would supply this gas limit when sending the message to L2.
* *
* @param message Message to get a gas estimate for. * @param message Message to get a gas estimate for.
* @param opts Options object.
* @param opts.bufferPercent Percentage of gas to add to the estimate. Defaults to 20.
* @returns Estimated L2 gas limit. * @returns Estimated L2 gas limit.
*/ */
estimateL2MessageGasLimit(message: MessageLike): Promise<BigNumber> estimateL2MessageGasLimit(
message: MessageLike,
opts?: {
bufferPercent?: number
}
): Promise<BigNumber>
/** /**
* Returns the estimated amount of time before the message can be executed. When this is a * Returns the estimated amount of time before the message can be executed. When this is a
......
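The buffer arithmetic behind opts.bufferPercent is just integer percentage scaling, mirroring the implementation shown earlier:

import { BigNumber } from 'ethers'

// Hypothetical numbers purely for illustration.
const estimate = BigNumber.from(200000)
const bufferPercent = 20

// Mirrors estimate.mul(100 + bufferPercent).div(100) from the implementation.
const gasLimit = estimate.mul(100 + bufferPercent).div(100)
console.log(gasLimit.toString()) // '240000'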
import assert from 'assert' import assert from 'assert'
import { import {
Provider, Provider,
TransactionReceipt, TransactionReceipt,
TransactionResponse, TransactionResponse,
} from '@ethersproject/abstract-provider' } from '@ethersproject/abstract-provider'
import { ethers, BigNumber } from 'ethers' import { ethers, BigNumber } from 'ethers'
import { import {
ProviderLike, ProviderLike,
TransactionLike, TransactionLike,
......
import { getContractInterface, predeploys } from '@eth-optimism/contracts' import { getContractInterface, predeploys } from '@eth-optimism/contracts'
import { ethers, Contract } from 'ethers' import { ethers, Contract } from 'ethers'
import { toAddress } from './coercion'
import { DeepPartial } from './type-utils'
import { import {
OEContracts, OEContracts,
OEL1Contracts, OEL1Contracts,
...@@ -10,8 +13,6 @@ import { ...@@ -10,8 +13,6 @@ import {
CustomBridges, CustomBridges,
CustomBridgesLike, CustomBridgesLike,
} from '../interfaces' } from '../interfaces'
import { toAddress } from './coercion'
import { DeepPartial } from './type-utils'
/** /**
* Full list of default L2 contract addresses. * Full list of default L2 contract addresses.
......
import { getContractInterface } from '@eth-optimism/contracts' import { getContractInterface } from '@eth-optimism/contracts'
import { ethers } from 'ethers' import { ethers } from 'ethers'
import { CoreCrossChainMessage } from '../interfaces' import { CoreCrossChainMessage } from '../interfaces'
/** /**
......
import { expect } from './setup'
import { Provider } from '@ethersproject/abstract-provider' import { Provider } from '@ethersproject/abstract-provider'
import { expectApprox } from '@eth-optimism/core-utils'
import { Contract } from 'ethers' import { Contract } from 'ethers'
import { ethers } from 'hardhat' import { ethers } from 'hardhat'
import { expect } from './setup'
import {
  CrossChainProvider,
  MessageDirection,
...@@ -1090,7 +1092,93 @@ describe('CrossChainProvider', () => {
  })
  describe('estimateL2MessageGasLimit', () => {
    it('should perform a gas estimation of the L2 action')
    let provider: CrossChainProvider
    beforeEach(async () => {
      provider = new CrossChainProvider({
        l1Provider: ethers.provider,
        l2Provider: ethers.provider,
        l1ChainId: 31337,
      })
    })
    describe('when the message is an L1 to L2 message', () => {
      it('should return an accurate gas estimate plus a ~20% buffer', async () => {
        const message = {
          direction: MessageDirection.L1_TO_L2,
          target: '0x' + '11'.repeat(20),
          sender: '0x' + '22'.repeat(20),
          message: '0x' + '33'.repeat(64),
          messageNonce: 1234,
          logIndex: 0,
          blockNumber: 1234,
          transactionHash: '0x' + '44'.repeat(32),
        }
        const estimate = await ethers.provider.estimateGas({
          to: message.target,
          from: message.sender,
          data: message.message,
        })
        // Approximately 20% greater than the estimate, +/- 1%.
        expectApprox(
          await provider.estimateL2MessageGasLimit(message),
          estimate.mul(120).div(100),
          {
            percentUpperDeviation: 1,
            percentLowerDeviation: 1,
          }
        )
      })
      it('should return an accurate gas estimate when a custom buffer is provided', async () => {
        const message = {
          direction: MessageDirection.L1_TO_L2,
          target: '0x' + '11'.repeat(20),
          sender: '0x' + '22'.repeat(20),
          message: '0x' + '33'.repeat(64),
          messageNonce: 1234,
          logIndex: 0,
          blockNumber: 1234,
          transactionHash: '0x' + '44'.repeat(32),
        }
        const estimate = await ethers.provider.estimateGas({
          to: message.target,
          from: message.sender,
          data: message.message,
        })
        // Approximately 30% greater than the estimate, +/- 1%.
        expectApprox(
          await provider.estimateL2MessageGasLimit(message, {
            bufferPercent: 30,
          }),
          estimate.mul(130).div(100),
          {
            percentUpperDeviation: 1,
            percentLowerDeviation: 1,
          }
        )
      })
    })
    describe('when the message is an L2 to L1 message', () => {
      it('should throw an error', async () => {
        const message = {
          direction: MessageDirection.L2_TO_L1,
          target: '0x' + '11'.repeat(20),
          sender: '0x' + '22'.repeat(20),
          message: '0x' + '33'.repeat(64),
          messageNonce: 1234,
          logIndex: 0,
          blockNumber: 1234,
          transactionHash: '0x' + '44'.repeat(32),
        }
        await expect(provider.estimateL2MessageGasLimit(message)).to.be.rejected
      })
    })
  })
  describe('estimateMessageWaitTimeBlocks', () => {
......
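The new tests above pin down the expected behavior of CrossChainProvider.estimateL2MessageGasLimit: for an L1 to L2 message it should run a plain gas estimation against the L2 provider using the message's target, sender, and calldata, then pad the result by a buffer (20% by default, configurable through the bufferPercent option), and it should reject L2 to L1 messages outright. The sketch below is a minimal illustration of logic that would satisfy those assertions; the method and option names are taken from the tests, while the standalone helper, its signature, and its body are assumptions for illustration rather than the actual SDK implementation.

import { BigNumber, providers } from 'ethers'

// Message directions as exercised by the tests above.
enum MessageDirection {
  L1_TO_L2,
  L2_TO_L1,
}

// Only the fields the estimation path actually needs (hypothetical shape).
interface CrossChainMessageLike {
  direction: MessageDirection
  target: string
  sender: string
  message: string
}

// Hypothetical helper mirroring the tested behavior: estimate gas for the
// L2 call and pad it by bufferPercent (default 20%); reject L2 => L1 messages.
export const estimateL2MessageGasLimit = async (
  l2Provider: providers.Provider,
  message: CrossChainMessageLike,
  opts: { bufferPercent?: number } = {}
): Promise<BigNumber> => {
  if (message.direction === MessageDirection.L2_TO_L1) {
    // The tests expect the returned promise to be rejected in this case.
    throw new Error('cannot estimate L2 gas limit for an L2 => L1 message')
  }

  const bufferPercent = opts.bufferPercent ?? 20
  const estimate = await l2Provider.estimateGas({
    to: message.target,
    from: message.sender,
    data: message.message,
  })

  // estimate * (100 + buffer) / 100, i.e. 120% of the raw estimate by default.
  return estimate.mul(100 + bufferPercent).div(100)
}

Under this assumption, the default path lines up with the estimate.mul(120).div(100) expectation in the first test, and bufferPercent: 30 with estimate.mul(130).div(100) in the second.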
import { expect } from '../setup'
import { Provider } from '@ethersproject/abstract-provider'
import { Contract } from 'ethers'
import { ethers } from 'hardhat'
import { expect } from '../setup'
import { toProvider, toTransactionHash } from '../../src'
describe('type coercion utils', () => {
......
/* eslint-disable @typescript-eslint/no-empty-function */
import { Signer } from 'ethers'
import { ethers } from 'hardhat'
import { expect } from '../setup'
import {
  getOEContract,
...@@ -6,8 +9,6 @@ import {
  CONTRACT_ADDRESSES,
  DEFAULT_L2_CONTRACT_ADDRESSES,
} from '../../src'
import { Signer } from 'ethers'
import { ethers } from 'hardhat'
describe('contract connection utils', () => {
  let signers: Signer[]
......
import { expect } from '../setup'
import { Contract, Signer } from 'ethers'
import { ethers } from 'hardhat'
import { getContractFactory } from '@eth-optimism/contracts'
import { expect } from '../setup'
import {
  CoreCrossChainMessage,
  encodeCrossChainMessage,
......
...@@ -3210,14 +3210,6 @@
    "@types/mime" "^1"
    "@types/node" "*"
"@types/shelljs@^0.8.8":
version "0.8.9"
resolved "https://registry.yarnpkg.com/@types/shelljs/-/shelljs-0.8.9.tgz#45dd8501aa9882976ca3610517dac3831c2fbbf4"
integrity sha512-flVe1dvlrCyQJN/SGrnBxqHG+RzXrVKsmjD8WS/qYHpq5UPjfq7UWFBENP0ZuOl0g6OpAlL6iBoLSvKYUUmyQw==
dependencies:
"@types/glob" "*"
"@types/node" "*"
"@types/sinon-chai@^3.2.3", "@types/sinon-chai@^3.2.5": "@types/sinon-chai@^3.2.3", "@types/sinon-chai@^3.2.5":
version "3.2.5" version "3.2.5"
resolved "https://registry.yarnpkg.com/@types/sinon-chai/-/sinon-chai-3.2.5.tgz#df21ae57b10757da0b26f512145c065f2ad45c48" resolved "https://registry.yarnpkg.com/@types/sinon-chai/-/sinon-chai-3.2.5.tgz#df21ae57b10757da0b26f512145c065f2ad45c48"
...@@ -6247,13 +6239,6 @@ directory-tree@^2.2.7: ...@@ -6247,13 +6239,6 @@ directory-tree@^2.2.7:
resolved "https://registry.yarnpkg.com/directory-tree/-/directory-tree-2.3.1.tgz#78b8aa84878eb84dd29a51dcd664ded4cd0247c7" resolved "https://registry.yarnpkg.com/directory-tree/-/directory-tree-2.3.1.tgz#78b8aa84878eb84dd29a51dcd664ded4cd0247c7"
integrity sha512-hxolIHCtQ/a56CUywaLzGD/V78zPwFihI+UK/4ZjOp7GoV4Mptmtv95yavOn/RlnTi7cCMjszvfcNrwCoWLH+Q== integrity sha512-hxolIHCtQ/a56CUywaLzGD/V78zPwFihI+UK/4ZjOp7GoV4Mptmtv95yavOn/RlnTi7cCMjszvfcNrwCoWLH+Q==
docker-compose@^0.23.8:
version "0.23.13"
resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.23.13.tgz#77d37bd05b6a966345f631e6d05e961c79514f06"
integrity sha512-/9fYC4g3AO+qsqxIZhmbVnFvJJPcYEV2yJbAPPXH+6AytU3urIY8lUAXOlvY8sl4u25pdKu1JrOfAmWC7lJDJg==
dependencies:
yaml "^1.10.2"
doctrine@^2.1.0:
  version "2.1.0"
  resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d"
...@@ -11538,15 +11523,15 @@ node-fetch@*:
    data-uri-to-buffer "^3.0.1"
    fetch-blob "^3.1.2"
node-fetch@2.6.1, node-fetch@^2.6.0, node-fetch@^2.6.1:
node-fetch@2.6.1:
  version "2.6.1"
  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
  integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
node-fetch@2.6.5:
node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1:
  version "2.6.5"
  version "2.6.7"
  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.5.tgz#42735537d7f080a7e5f78b6c549b7146be1742fd"
  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
  integrity sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==
  integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
  dependencies:
    whatwg-url "^5.0.0"
...@@ -13963,7 +13948,7 @@ shebang-regex@^3.0.0:
  resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
  integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
shelljs@^0.8.3, shelljs@^0.8.4:
shelljs@^0.8.3:
  version "0.8.5"
  resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c"
  integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==
...@@ -15138,9 +15123,9 @@ trim-newlines@^3.0.0:
  integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==
trim-off-newlines@^1.0.0:
  version "1.0.1"
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz#9f9ba9d9efa8764c387698bcbfeb2c848f11adb3"
  resolved "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.3.tgz#8df24847fcb821b0ab27d58ab6efec9f2fe961a1"
  integrity sha1-n5up2e+odkw4dpi8v+sshI8RrbM=
  integrity sha512-kh6Tu6GbeSNMGfrrZh6Bb/4ZEHV1QlB4xNDBeog8Y9/QwFlKTRyWvY3Fs9tRDAMZliVUwieMgEdIeL/FtqjkJg==
trim-right@^1.0.1:
  version "1.0.1"
...@@ -16639,7 +16624,7 @@ yallist@^4.0.0:
  resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
  integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
yaml@^1.10.0, yaml@^1.10.2:
yaml@^1.10.0:
  version "1.10.2"
  resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b"
  integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==
......