Commit 86c2fbe8 authored by mergify[bot], committed by GitHub

Merge branch 'develop' into refcell/remove/contracts

parents 0f9ed2f6 6ab755d1
......@@ -88,11 +88,9 @@ jobs:
- "packages/contracts-governance/node_modules"
- "packages/contracts-periphery/node_modules"
- "packages/core-utils/node_modules"
- "packages/data-transport-layer/node_modules"
- "packages/drippie-mon/node_modules"
- "packages/fault-detector/node_modules"
- "packages/hardhat-deploy-config/node_modules"
- "packages/message-relayer/node_modules"
- "packages/migration-data/node_modules"
- "packages/replica-healthcheck/node_modules"
- "packages/sdk/node_modules"
......@@ -672,10 +670,6 @@ jobs:
name: Check core-utils
command: npx depcheck
working_directory: packages/core-utils
- run:
name: Check data-transport-layer
command: npx depcheck
working_directory: packages/data-transport-layer
- run:
name: Check sdk
command: npx depcheck
......@@ -1180,13 +1174,6 @@ workflows:
dependencies: "(contracts|contracts-bedrock|core-utils|hardhat-deploy-config)"
requires:
- yarn-monorepo
- js-lint-test:
name: dtl-tests
coverage_flag: dtl-tests
package_name: data-transport-layer
dependencies: "(common-ts|contracts|core-utils)"
requires:
- yarn-monorepo
- js-lint-test:
name: chain-mon-tests
coverage_flag: chain-mon-tests
......@@ -1201,13 +1188,6 @@ workflows:
dependencies: "(common-ts|contracts|core-utils|sdk)"
requires:
- yarn-monorepo
- js-lint-test:
name: message-relayer-tests
coverage_flag: message-relayer-tests
package_name: message-relayer
dependencies: "(common-ts|core-utils|sdk)"
requires:
- yarn-monorepo
- js-lint-test:
name: replica-healthcheck-tests
coverage_flag: replica-healthcheck-tests
......
......@@ -6,11 +6,9 @@
/packages/contracts-bedrock @ethereum-optimism/contract-reviewers
/packages/contracts-periphery @ethereum-optimism/contract-reviewers
/packages/core-utils @ethereum-optimism/legacy-reviewers
/packages/data-transport-layer @ethereum-optimism/legacy-reviewers
/packages/chain-mon @smartcontracts
/packages/fault-detector @ethereum-optimism/devxpod
/packages/hardhat-deploy-config @ethereum-optimism/legacy-reviewers
/packages/message-relayer @ethereum-optimism/legacy-reviewers
/packages/migration-data @ethereum-optimism/legacy-reviewers
/packages/replica-healthcheck @ethereum-optimism/legacy-reviewers
/packages/sdk @ethereum-optimism/devxpod
......
---
C-Protocol-Critical:
- 'packages/data-transport-layer/**/*.ts'
- 'packages/contracts-bedrock/**/*.sol'
- 'l2geth/**/*.go'
......@@ -16,12 +16,10 @@ jobs:
# map the step outputs to job outputs
outputs:
l2geth: ${{ steps.packages.outputs.l2geth }}
message-relayer: ${{ steps.packages.outputs.message-relayer }}
fault-detector: ${{ steps.packages.outputs.fault-detector }}
balance-mon: ${{ steps.packages.outputs.balance-mon }}
drippie-mon: ${{ steps.packages.outputs.drippie-mon }}
wd-mon: ${{ steps.packages.outputs.wd-mon }}
data-transport-layer: ${{ steps.packages.outputs.data-transport-layer }}
contracts: ${{ steps.packages.outputs.contracts }}
contracts-bedrock: ${{ steps.packages.outputs.contracts-bedrock }}
replica-healthcheck: ${{ steps.packages.outputs.replica-healthcheck }}
......@@ -149,33 +147,6 @@ jobs:
push: true
tags: ethereumoptimism/hardhat-node:${{ needs.canary-publish.outputs.canary-docker-tag }}
message-relayer:
name: Publish Message Relayer Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
if: needs.canary-publish.outputs.message-relayer != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.packages
target: message-relayer
push: true
tags: ethereumoptimism/message-relayer:${{ needs.canary-publish.outputs.canary-docker-tag }}
fault-detector:
name: Publish Fault Detector Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
......@@ -284,33 +255,6 @@ jobs:
push: true
tags: ethereumoptimism/wd-mon:${{ needs.canary-publish.outputs.canary-docker-tag }}
data-transport-layer:
name: Publish Data Transport Layer Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
if: needs.canary-publish.outputs.data-transport-layer != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.packages
target: data-transport-layer
push: true
tags: ethereumoptimism/data-transport-layer:${{ needs.canary-publish.outputs.canary-docker-tag }}
contracts:
name: Publish Deployer Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
......
......@@ -15,12 +15,10 @@ jobs:
# map the step outputs to job outputs
outputs:
l2geth: ${{ steps.packages.outputs.l2geth }}
message-relayer: ${{ steps.packages.outputs.message-relayer }}
fault-detector: ${{ steps.packages.outputs.fault-detector }}
balance-mon: ${{ steps.packages.outputs.balance-mon }}
drippie-mon: ${{ steps.packages.outputs.drippie-mon }}
wd-mon: ${{ steps.packages.outputs.wd-mon }}
data-transport-layer: ${{ steps.packages.outputs.data-transport-layer }}
contracts: ${{ steps.packages.outputs.contracts }}
contracts-bedrock: ${{ steps.packages.outputs.contracts-bedrock }}
balance-monitor: ${{ steps.packages.outputs.balance-monitor }}
......@@ -198,33 +196,6 @@ jobs:
GITCOMMIT=${{ steps.build_args.outputs.GITCOMMIT }}
GITVERSION=${{ steps.build_args.outputs.GITVERSION }}
message-relayer:
name: Publish Message Relayer Version ${{ needs.release.outputs.message-relayer }}
needs: release
if: needs.release.outputs.message-relayer != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.packages
target: message-relayer
push: true
tags: ethereumoptimism/message-relayer:${{ needs.release.outputs.message-relayer }},ethereumoptimism/message-relayer:latest
fault-detector:
name: Publish Fault Detector Version ${{ needs.release.outputs.fault-detector }}
needs: release
......@@ -333,33 +304,6 @@ jobs:
push: true
tags: ethereumoptimism/drippie-mon:${{ needs.release.outputs.drippie-mon }},ethereumoptimism/drippie-mon:latest
data-transport-layer:
name: Publish Data Transport Layer Version ${{ needs.release.outputs.data-transport-layer }}
needs: release
if: needs.release.outputs.data-transport-layer != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.packages
target: data-transport-layer
push: true
tags: ethereumoptimism/data-transport-layer:${{ needs.release.outputs.data-transport-layer }},ethereumoptimism/data-transport-layer:latest
contracts:
name: Publish Deployer Version ${{ needs.release.outputs.contracts }}
needs: release
......
......@@ -23,8 +23,6 @@ packages/contracts-periphery/@openzeppelin*
packages/contracts-periphery/hardhat*
packages/contracts-periphery/forge-artifacts*
packages/data-transport-layer/db
packages/contracts-bedrock/deployments/devnetL1
packages/contracts-bedrock/deployments/anvil
......
......@@ -20,18 +20,10 @@
"directory": "packages/contracts-periphery",
"changeProcessCWD": true
},
{
"directory": "packages/data-transport-layer",
"changeProcessCWD": true
},
{
"directory": "packages/chain-mon",
"changeProcessCWD": true
},
{
"directory": "packages/message-relayer",
"changeProcessCWD": true
},
{
"directory": "packages/fault-detector",
"changeProcessCWD": true
......
......@@ -124,10 +124,8 @@ This will build the following containers:
* [`l1_chain`](https://hub.docker.com/r/ethereumoptimism/hardhat): simulated L1 chain using hardhat-evm as a backend
* [`deployer`](https://hub.docker.com/r/ethereumoptimism/deployer): process that deploys L1 smart contracts to the L1 chain
* [`dtl`](https://hub.docker.com/r/ethereumoptimism/data-transport-layer): service that indexes transaction data from the L1 chain
* [`l2geth`](https://hub.docker.com/r/ethereumoptimism/l2geth): L2 geth node running in Sequencer mode
* [`verifier`](https://hub.docker.com/r/ethereumoptimism/go-ethereum): L2 geth node running in Verifier mode
* [`relayer`](https://hub.docker.com/r/ethereumoptimism/message-relayer): helper process that relays messages between L1 and L2
If you want to make a change to a container, you'll need to take it down and rebuild it.
For example, if you make a change in l2geth:
......
......@@ -54,10 +54,8 @@ Refer to the Directory Structure section below to understand which packages are
│ ├── <a href="./packages/contracts-bedrock">contracts-bedrock</a>: Bedrock smart contracts.
│ ├── <a href="./packages/contracts-periphery">contracts-periphery</a>: Peripheral contracts for Optimism
│ ├── <a href="./packages/core-utils">core-utils</a>: Low-level utilities that make building Optimism easier
│ ├── <a href="./packages/data-transport-layer">data-transport-layer</a>: Service for indexing Optimism-related L1 data
│ ├── <a href="./packages/chain-mon">chain-mon</a>: Chain monitoring services
│ ├── <a href="./packages/fault-detector">fault-detector</a>: Service for detecting Sequencer faults
│ ├── <a href="./packages/message-relayer">message-relayer</a>: Tool for automatically relaying L1<>L2 messages in development
│ ├── <a href="./packages/replica-healthcheck">replica-healthcheck</a>: Service for monitoring the health of a replica node
│ └── <a href="./packages/sdk">sdk</a>: provides a set of tools for interacting with Optimism
├── <a href="./op-bindings">op-bindings</a>: Go bindings for Bedrock smart contracts.
......@@ -83,10 +81,8 @@ Refer to the Directory Structure section below to understand which packages are
│ ├── <a href="./packages/common-ts">common-ts</a>: Common tools for building apps in TypeScript
│ ├── <a href="./packages/contracts-periphery">contracts-periphery</a>: Peripheral contracts for Optimism
│ ├── <a href="./packages/core-utils">core-utils</a>: Low-level utilities that make building Optimism easier
│ ├── <a href="./packages/data-transport-layer">data-transport-layer</a>: Service for indexing Optimism-related L1 data
│ ├── <a href="./packages/chain-mon">chain-mon</a>: Chain monitoring services
│ ├── <a href="./packages/fault-detector">fault-detector</a>: Service for detecting Sequencer faults
│ ├── <a href="./packages/message-relayer">message-relayer</a>: Tool for automatically relaying L1<>L2 messages in development
│ ├── <a href="./packages/replica-healthcheck">replica-healthcheck</a>: Service for monitoring the health of a replica node
│ └── <a href="./packages/sdk">sdk</a>: provides a set of tools for interacting with Optimism
├── <a href="./indexer">indexer</a>: indexes and syncs transactions
......
......@@ -39,6 +39,5 @@ flag_management:
- name: dtl-tests
- name: chain-mon-tests
- name: fault-detector-tests
- name: message-relayer-tests
- name: replica-healthcheck-tests
- name: sdk-tests
......@@ -2,7 +2,6 @@ package api
import (
"encoding/json"
"log"
"net/http"
"github.com/ethereum-optimism/optimism/indexer/database"
......@@ -101,9 +100,6 @@ func NewApi(bv database.BridgeView) *Api {
}
func (a *Api) Listen(port string) {
err := http.ListenAndServe(port, a.Router)
if err != nil {
log.Fatal("Http server failed to start listening", err)
}
func (a *Api) Listen(port string) error {
return http.ListenAndServe(port, a.Router)
}
......@@ -5,6 +5,9 @@ import (
"errors"
"math/big"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum-optimism/optimism/op-node/eth"
)
......@@ -15,38 +18,60 @@ var (
ErrInvalidBlockNumber = errors.New("invalid block number")
// ErrUnsupportedL2OOVersion is returned when the output version is not supported.
ErrUnsupportedL2OOVersion = errors.New("unsupported l2oo version")
// ErrInvalidOutputLogTopic is returned when the output log topic is invalid.
ErrInvalidOutputLogTopic = errors.New("invalid output log topic")
// ErrInvalidOutputTopicLength is returned when the output log topic length is invalid.
ErrInvalidOutputTopicLength = errors.New("invalid output log topic length")
)
// ParseOutputLog parses a log from the L2OutputOracle contract.
func (c *Challenger) ParseOutputLog(log *types.Log) (*bindings.TypesOutputProposal, error) {
// Check the length of log topics
if len(log.Topics) != 4 {
return nil, ErrInvalidOutputTopicLength
}
// Validate the first topic is the output log topic
if log.Topics[0] != c.l2ooABI.Events["OutputProposed"].ID {
return nil, ErrInvalidOutputLogTopic
}
l2BlockNumber := new(big.Int).SetBytes(log.Topics[3][:])
expected := log.Topics[1]
return &bindings.TypesOutputProposal{
L2BlockNumber: l2BlockNumber,
OutputRoot: eth.Bytes32(expected),
}, nil
}
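For reference, the topic layout that ParseOutputLog relies on can be reproduced from TypeScript with ethers (already used throughout the monorepo). This is an illustrative sketch, not part of the commit; parseOutputLogTs is a hypothetical name:

import { ethers } from 'ethers'

// keccak256 of the event signature; topic[0] of every OutputProposed log
const OUTPUT_PROPOSED_TOPIC = ethers.utils.id(
  'OutputProposed(bytes32,uint256,uint256,uint256)'
)

// Mirrors Challenger.ParseOutputLog: topics are
// [eventID, outputRoot, l2OutputIndex, l2BlockNumber]
const parseOutputLogTs = (log: { topics: string[] }) => {
  if (log.topics.length !== 4) {
    throw new Error('invalid output log topic length')
  }
  if (log.topics[0] !== OUTPUT_PROPOSED_TOPIC) {
    throw new Error('invalid output log topic')
  }
  return {
    outputRoot: log.topics[1],
    l2BlockNumber: ethers.BigNumber.from(log.topics[3]),
  }
}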
// ValidateOutput checks that a given output is expected via a trusted rollup node rpc.
// It returns: if the output is correct, the fetched output, error
func (c *Challenger) ValidateOutput(ctx context.Context, l2BlockNumber *big.Int, expected eth.Bytes32) (bool, *eth.Bytes32, error) {
func (c *Challenger) ValidateOutput(ctx context.Context, proposal bindings.TypesOutputProposal) (bool, eth.Bytes32, error) {
// Fetch the output from the rollup node
ctx, cancel := context.WithTimeout(ctx, c.networkTimeout)
defer cancel()
output, err := c.rollupClient.OutputAtBlock(ctx, l2BlockNumber.Uint64())
output, err := c.rollupClient.OutputAtBlock(ctx, proposal.L2BlockNumber.Uint64())
if err != nil {
c.log.Error("Failed to fetch output", "blockNum", l2BlockNumber, "err", err)
return false, nil, err
c.log.Error("Failed to fetch output", "blockNum", proposal.L2BlockNumber, "err", err)
return false, eth.Bytes32{}, err
}
// Compare the output root to the expected output root
equalRoots, err := c.compareOutputRoots(output, expected, l2BlockNumber)
equalRoots, err := c.compareOutputRoots(output, proposal)
if err != nil {
return false, nil, err
return false, eth.Bytes32{}, err
}
return equalRoots, &output.OutputRoot, nil
return equalRoots, output.OutputRoot, nil
}
// compareOutputRoots compares the output root of the given block number to the expected output root.
func (c *Challenger) compareOutputRoots(received *eth.OutputResponse, expected eth.Bytes32, blockNumber *big.Int) (bool, error) {
func (c *Challenger) compareOutputRoots(received *eth.OutputResponse, expected bindings.TypesOutputProposal) (bool, error) {
if received.Version != supportedL2OutputVersion {
c.log.Error("Unsupported l2 output version", "version", received.Version)
return false, ErrUnsupportedL2OOVersion
}
if received.BlockRef.Number != blockNumber.Uint64() {
c.log.Error("Invalid blockNumber", "expected", blockNumber, "actual", received.BlockRef.Number)
if received.BlockRef.Number != expected.L2BlockNumber.Uint64() {
c.log.Error("Invalid blockNumber", "expected", expected.L2BlockNumber, "actual", received.BlockRef.Number)
return false, ErrInvalidBlockNumber
}
return received.OutputRoot == expected, nil
return received.OutputRoot == expected.OutputRoot, nil
}
......@@ -10,13 +10,55 @@ import (
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum-optimism/optimism/op-challenger/metrics"
"github.com/ethereum-optimism/optimism/op-node/eth"
"github.com/ethereum-optimism/optimism/op-node/testlog"
)
func TestChallenger_OutputProposed_Signature(t *testing.T) {
computed := crypto.Keccak256Hash([]byte("OutputProposed(bytes32,uint256,uint256,uint256)"))
challenger := newTestChallenger(t, eth.OutputResponse{}, true)
expected := challenger.l2ooABI.Events["OutputProposed"].ID
require.Equal(t, expected, computed)
}
func TestParseOutputLog_Succeeds(t *testing.T) {
challenger := newTestChallenger(t, eth.OutputResponse{}, true)
expectedBlockNumber := big.NewInt(0x04)
expectedOutputRoot := [32]byte{0x02}
logTopic := challenger.l2ooABI.Events["OutputProposed"].ID
log := types.Log{
Topics: []common.Hash{logTopic, common.Hash(expectedOutputRoot), {0x03}, common.BigToHash(expectedBlockNumber)},
}
outputProposal, err := challenger.ParseOutputLog(&log)
require.NoError(t, err)
require.Equal(t, expectedBlockNumber, outputProposal.L2BlockNumber)
require.Equal(t, expectedOutputRoot, outputProposal.OutputRoot)
}
func TestParseOutputLog_WrongLogTopic_Errors(t *testing.T) {
challenger := newTestChallenger(t, eth.OutputResponse{}, true)
_, err := challenger.ParseOutputLog(&types.Log{
Topics: []common.Hash{{0x01}, {0x02}, {0x03}, {0x04}},
})
require.ErrorIs(t, err, ErrInvalidOutputLogTopic)
}
func TestParseOutputLog_WrongTopicLength_Errors(t *testing.T) {
challenger := newTestChallenger(t, eth.OutputResponse{}, true)
logTopic := challenger.l2ooABI.Events["OutputProposed"].ID
_, err := challenger.ParseOutputLog(&types.Log{
Topics: []common.Hash{logTopic, {0x02}, {0x03}},
})
require.ErrorIs(t, err, ErrInvalidOutputTopicLength)
}
func TestChallenger_ValidateOutput_RollupClientErrors(t *testing.T) {
output := eth.OutputResponse{
Version: supportedL2OutputVersion,
......@@ -26,9 +68,13 @@ func TestChallenger_ValidateOutput_RollupClientErrors(t *testing.T) {
challenger := newTestChallenger(t, output, true)
valid, received, err := challenger.ValidateOutput(context.Background(), big.NewInt(0), output.OutputRoot)
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(0),
OutputRoot: output.OutputRoot,
}
valid, received, err := challenger.ValidateOutput(context.Background(), checked)
require.False(t, valid)
require.Nil(t, received)
require.Equal(t, eth.Bytes32{}, received)
require.ErrorIs(t, err, mockOutputApiError)
}
......@@ -41,9 +87,13 @@ func TestChallenger_ValidateOutput_ErrorsWithWrongVersion(t *testing.T) {
challenger := newTestChallenger(t, output, false)
valid, received, err := challenger.ValidateOutput(context.Background(), big.NewInt(0), eth.Bytes32{})
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(0),
OutputRoot: output.OutputRoot,
}
valid, received, err := challenger.ValidateOutput(context.Background(), checked)
require.False(t, valid)
require.Nil(t, received)
require.Equal(t, eth.Bytes32{}, received)
require.ErrorIs(t, err, ErrUnsupportedL2OOVersion)
}
......@@ -56,9 +106,13 @@ func TestChallenger_ValidateOutput_ErrorsInvalidBlockNumber(t *testing.T) {
challenger := newTestChallenger(t, output, false)
valid, received, err := challenger.ValidateOutput(context.Background(), big.NewInt(1), output.OutputRoot)
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(1),
OutputRoot: output.OutputRoot,
}
valid, received, err := challenger.ValidateOutput(context.Background(), checked)
require.False(t, valid)
require.Nil(t, received)
require.Equal(t, eth.Bytes32{}, received)
require.ErrorIs(t, err, ErrInvalidBlockNumber)
}
......@@ -71,8 +125,12 @@ func TestOutput_ValidateOutput(t *testing.T) {
challenger := newTestChallenger(t, output, false)
valid, expected, err := challenger.ValidateOutput(context.Background(), big.NewInt(0), output.OutputRoot)
require.Equal(t, *expected, output.OutputRoot)
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(0),
OutputRoot: output.OutputRoot,
}
valid, expected, err := challenger.ValidateOutput(context.Background(), checked)
require.Equal(t, expected, output.OutputRoot)
require.True(t, valid)
require.NoError(t, err)
}
......@@ -86,7 +144,11 @@ func TestChallenger_CompareOutputRoots_ErrorsWithDifferentRoots(t *testing.T) {
challenger := newTestChallenger(t, output, false)
valid, err := challenger.compareOutputRoots(&output, output.OutputRoot, big.NewInt(0))
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(0),
OutputRoot: output.OutputRoot,
}
valid, err := challenger.compareOutputRoots(&output, checked)
require.False(t, valid)
require.ErrorIs(t, err, ErrUnsupportedL2OOVersion)
}
......@@ -100,7 +162,11 @@ func TestChallenger_CompareOutputRoots_ErrInvalidBlockNumber(t *testing.T) {
challenger := newTestChallenger(t, output, false)
valid, err := challenger.compareOutputRoots(&output, output.OutputRoot, big.NewInt(1))
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(1),
OutputRoot: output.OutputRoot,
}
valid, err := challenger.compareOutputRoots(&output, checked)
require.False(t, valid)
require.ErrorIs(t, err, ErrInvalidBlockNumber)
}
......@@ -114,11 +180,19 @@ func TestChallenger_CompareOutputRoots_Succeeds(t *testing.T) {
challenger := newTestChallenger(t, output, false)
valid, err := challenger.compareOutputRoots(&output, output.OutputRoot, big.NewInt(0))
checked := bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(0),
OutputRoot: output.OutputRoot,
}
valid, err := challenger.compareOutputRoots(&output, checked)
require.True(t, valid)
require.NoError(t, err)
valid, err = challenger.compareOutputRoots(&output, eth.Bytes32{0x01}, big.NewInt(0))
checked = bindings.TypesOutputProposal{
L2BlockNumber: big.NewInt(0),
OutputRoot: eth.Bytes32{0x01},
}
valid, err = challenger.compareOutputRoots(&output, checked)
require.False(t, valid)
require.NoError(t, err)
}
......@@ -127,11 +201,14 @@ func newTestChallenger(t *testing.T, output eth.OutputResponse, errors bool) *Ch
outputApi := newMockOutputApi(output, errors)
log := testlog.Logger(t, log.LvlError)
metr := metrics.NewMetrics("test")
parsedL2oo, err := bindings.L2OutputOracleMetaData.GetAbi()
require.NoError(t, err)
challenger := Challenger{
rollupClient: outputApi,
log: log,
metr: metr,
networkTimeout: time.Duration(5) * time.Second,
l2ooABI: parsedL2oo,
}
return &challenger
}
......
version: '3.4'
x-system-addr-env: &system-addr-env
# private key: a6aecc98b63bafb0de3b29ae9964b14acb4086057808be29f90150214ebd4a0f
# OK to publish this since it will only ever be used in itests
SYSTEM_ADDRESS_0_DEPLOYER: '0xa961b0d6dce82db098cf70a42a14add3ee3db2d5'
# private key: 3b8d2345102cce2443acb240db6e87c8edd4bb3f821b17fab8ea2c9da08ea132
# OK to publish this since it will only ever be used in itests
SYSTEM_ADDRESS_1_DEPLOYER: '0xdfc82d475833a50de90c642770f34a9db7deb725'
services:
# this is a helper service used because there's no official hardhat image
l1_chain:
image: ethereumoptimism/hardhat-node:${DOCKER_TAG_HARDHAT:-latest}
build:
context: ./docker/hardhat
dockerfile: Dockerfile
env_file:
- ./envs/l1_chain.env
ports:
# expose the service to the host for integration testing
- ${L1CHAIN_HTTP_PORT:-9545}:8545
deployer:
depends_on:
- l1_chain
build:
context: ..
dockerfile: ./ops/docker/Dockerfile.packages
target: deployer
image: ethereumoptimism/deployer:${DOCKER_TAG_DEPLOYER:-latest}
entrypoint: ./deployer.sh
environment:
# Env vars for the deployment script.
CONTRACTS_RPC_URL: http://l1_chain:8545
CONTRACTS_DEPLOYER_KEY: 'ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
CONTRACTS_TARGET_NETWORK: 'local'
ports:
# expose the service to the host for getting the contract addrs
- ${DEPLOYER_PORT:-8080}:8081
dtl:
depends_on:
- l1_chain
- deployer
- l2geth
build:
context: ..
dockerfile: ./ops/docker/Dockerfile.packages
target: data-transport-layer
image: ethereumoptimism/data-transport-layer:${DOCKER_TAG_DATA_TRANSPORT_LAYER:-latest}
# override with the dtl script and the env vars required for it
entrypoint: ./dtl.sh
env_file:
- ./envs/dtl.env
# set the rest of the env vars for the network which do not
# depend on the docker-compose setup
environment:
# used for setting the address manager address
URL: http://deployer:8081/addresses.json
# connect to the 2 layers
DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT: http://l1_chain:8545
DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT: http://l2geth:8545
DATA_TRANSPORT_LAYER__SYNC_FROM_L2: 'true'
DATA_TRANSPORT_LAYER__L2_CHAIN_ID: 17
ports:
- ${DTL_PORT:-7878}:7878
l2geth:
depends_on:
- l1_chain
- deployer
build:
context: ..
dockerfile: ./l2geth/Dockerfile
image: ethereumoptimism/l2geth:${DOCKER_TAG_L2GETH:-latest}
# override with the geth script and the env vars required for it
entrypoint: sh ./geth.sh
env_file:
- ./envs/geth.env
environment:
<<: *system-addr-env
ETH1_HTTP: http://l1_chain:8545
ROLLUP_TIMESTAMP_REFRESH: 5s
ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json
# connecting to the DTL
ROLLUP_CLIENT_HTTP: http://dtl:7878
ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60
# no need to keep this secret, only used internally to sign blocks
BLOCK_SIGNER_KEY: '6587ae678cf4fc9a33000cdbf9f35226b71dcc6a4684a31203241f9bcfd55d27'
BLOCK_SIGNER_ADDRESS: '0x00000398232E2064F896018496b4b44b3D62751F'
ROLLUP_ENFORCE_FEES: ${ROLLUP_ENFORCE_FEES:-true}
ROLLUP_FEE_THRESHOLD_DOWN: 0.9
ROLLUP_FEE_THRESHOLD_UP: 1.1
ports:
- ${L2GETH_HTTP_PORT:-8545}:8545
- ${L2GETH_WS_PORT:-8546}:8546
relayer:
depends_on:
- l1_chain
- l2geth
deploy:
replicas: 0
build:
context: ..
dockerfile: ./ops/docker/Dockerfile.packages
target: message-relayer
image: ethereumoptimism/message-relayer:${DOCKER_TAG_MESSAGE_RELAYER:-latest}
entrypoint: ./relayer.sh
environment:
MESSAGE_RELAYER__L1_RPC_PROVIDER: http://l1_chain:8545
MESSAGE_RELAYER__L2_RPC_PROVIDER: http://l2geth:8545
MESSAGE_RELAYER__L1_WALLET: '0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97'
RETRIES: 60
fault_detector:
depends_on:
- l1_chain
- l2geth
deploy:
replicas: 0
build:
context: ..
dockerfile: ./ops/docker/Dockerfile.packages
target: fault-detector
image: ethereumoptimism/fault-detector:${DOCKER_TAG_FAULT_DETECTOR:-latest}
entrypoint: ./detector.sh
environment:
FAULT_DETECTOR__L1_RPC_PROVIDER: http://l1_chain:8545
FAULT_DETECTOR__L2_RPC_PROVIDER: http://l2geth:8545
RETRIES: 60
verifier:
depends_on:
- l1_chain
- deployer
- dtl
- l2geth
deploy:
replicas: 1
build:
context: ..
dockerfile: ./l2geth/Dockerfile
image: ethereumoptimism/l2geth:${DOCKER_TAG_L2GETH:-latest}
entrypoint: sh ./geth.sh
env_file:
- ./envs/geth.env
environment:
<<: *system-addr-env
ETH1_HTTP: http://l1_chain:8545
SEQUENCER_CLIENT_HTTP: http://l2geth:8545
ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json
ROLLUP_CLIENT_HTTP: http://dtl:7878
ROLLUP_BACKEND: 'l1'
ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60
ROLLUP_VERIFIER_ENABLE: 'true'
ports:
- ${VERIFIER_HTTP_PORT:-8547}:8545
- ${VERIFIER_WS_PORT:-8548}:8546
replica:
depends_on:
- dtl
- l2geth
deploy:
replicas: 1
build:
context: ..
dockerfile: ./l2geth/Dockerfile
image: ethereumoptimism/l2geth:${DOCKER_TAG_L2GETH:-latest}
entrypoint: sh ./geth.sh
env_file:
- ./envs/geth.env
environment:
<<: *system-addr-env
ETH1_HTTP: http://l1_chain:8545
SEQUENCER_CLIENT_HTTP: http://l2geth:8545
ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json
ROLLUP_CLIENT_HTTP: http://dtl:7878
ROLLUP_BACKEND: 'l2'
ROLLUP_VERIFIER_ENABLE: 'true'
ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60
ports:
- ${REPLICA_HTTP_PORT:-8549}:8545
- ${REPLICA_WS_PORT:-8550}:8546
replica_healthcheck:
depends_on:
- l2geth
- replica
deploy:
replicas: 0
build:
context: ..
dockerfile: ./ops/docker/Dockerfile.packages
target: replica-healthcheck
image: ethereumoptimism/replica-healthcheck:${DOCKER_TAG_REPLICA_HEALTHCHECK:-latest}
environment:
HEALTHCHECK__REFERENCE_RPC_PROVIDER: http://l2geth:8545
HEALTHCHECK__TARGET_RPC_PROVIDER: http://replica:8545
ports:
- ${HEALTHCHECK_HTTP_PORT:-7300}:7300
......@@ -88,17 +88,6 @@ FROM base as deployer-bedrock
WORKDIR /opt/optimism/packages/contracts-bedrock
CMD ["yarn", "run", "deploy"]
FROM base as data-transport-layer
WORKDIR /opt/optimism/packages/data-transport-layer
COPY ./ops/scripts/dtl.sh .
CMD ["node", "dist/src/services/run.js"]
FROM base as message-relayer
WORKDIR /opt/optimism/packages/message-relayer
COPY ./ops/scripts/relayer.sh .
CMD ["npm", "run", "start"]
FROM base as fault-detector
WORKDIR /opt/optimism/packages/fault-detector
......
......@@ -22,7 +22,7 @@ DRIPPIE_MON__DRIPPIE_ADDRESS=
# ↓ wd-mon ↓ #
###############################################################################
# RPCs pointing to a base chain and ptimism chain
# RPCs pointing to a base chain and Optimism chain
TWO_STEP_MONITOR__L1_RPC_PROVIDER=
TWO_STEP_MONITOR__L2_RPC_PROVIDER=
......
......@@ -2,17 +2,29 @@ import assert from 'assert'
import { DeployFunction } from 'hardhat-deploy/dist/types'
import '@eth-optimism/hardhat-deploy-config'
import { ethers } from 'ethers'
import { BigNumber } from 'ethers'
import { defaultResourceConfig } from '../src/constants'
import { assertContractVariable, deploy } from '../src/deploy-utils'
const uint128Max = ethers.BigNumber.from('0xffffffffffffffffffffffffffffffff')
const deployFn: DeployFunction = async (hre) => {
const batcherHash = hre.ethers.utils
.hexZeroPad(hre.deployConfig.batchSenderAddress, 32)
.toLowerCase()
const l2GenesisBlockGasLimit = BigNumber.from(
hre.deployConfig.l2GenesisBlockGasLimit
)
const l2GasLimitLowerBound = BigNumber.from(
defaultResourceConfig.systemTxMaxGas +
defaultResourceConfig.maxResourceLimit
)
if (l2GenesisBlockGasLimit.lt(l2GasLimitLowerBound)) {
throw new Error(
`L2 genesis block gas limit must be at least ${l2GasLimitLowerBound}`
)
}
await deploy({
hre,
name: 'SystemConfig',
......@@ -21,16 +33,9 @@ const deployFn: DeployFunction = async (hre) => {
hre.deployConfig.gasPriceOracleOverhead,
hre.deployConfig.gasPriceOracleScalar,
batcherHash,
hre.deployConfig.l2GenesisBlockGasLimit,
l2GenesisBlockGasLimit,
hre.deployConfig.p2pSequencerAddress,
{
maxResourceLimit: 20_000_000,
elasticityMultiplier: 10,
baseFeeMaxChangeDenominator: 8,
systemTxMaxGas: 1_000_000,
minimumBaseFee: ethers.utils.parseUnits('1', 'gwei'),
maximumBaseFee: uint128Max,
},
defaultResourceConfig,
],
postDeployAction: async (contract) => {
await assertContractVariable(
......@@ -56,12 +61,30 @@ const deployFn: DeployFunction = async (hre) => {
)
const config = await contract.resourceConfig()
assert(config.maxResourceLimit === 20_000_000)
assert(config.elasticityMultiplier === 10)
assert(config.baseFeeMaxChangeDenominator === 8)
assert(config.systemTxMaxGas === 1_000_000)
assert(ethers.utils.parseUnits('1', 'gwei').eq(config.minimumBaseFee))
assert(config.maximumBaseFee.eq(uint128Max))
assert(config.maxResourceLimit === defaultResourceConfig.maxResourceLimit)
assert(
config.elasticityMultiplier ===
defaultResourceConfig.elasticityMultiplier
)
assert(
config.baseFeeMaxChangeDenominator ===
defaultResourceConfig.baseFeeMaxChangeDenominator
)
assert(
BigNumber.from(config.systemTxMaxGas).eq(
defaultResourceConfig.systemTxMaxGas
)
)
assert(
BigNumber.from(config.minimumBaseFee).eq(
defaultResourceConfig.minimumBaseFee
)
)
assert(
BigNumber.from(config.maximumBaseFee).eq(
defaultResourceConfig.maximumBaseFee
)
)
},
})
}
......
import assert from 'assert'
import { ethers, BigNumber } from 'ethers'
import { ethers } from 'ethers'
import { DeployFunction } from 'hardhat-deploy/dist/types'
import { awaitCondition } from '@eth-optimism/core-utils'
import '@eth-optimism/hardhat-deploy-config'
......@@ -10,6 +10,7 @@ import {
getContractsFromArtifacts,
getDeploymentAddress,
} from '../src/deploy-utils'
import { defaultResourceConfig } from '../src/constants'
const deployFn: DeployFunction = async (hre) => {
const { deployer } = await hre.getNamedAccounts()
......@@ -102,16 +103,7 @@ const deployFn: DeployFunction = async (hre) => {
unsafeBlockSigner: hre.deployConfig.p2pSequencerAddress,
// The resource config is not exposed to the end user
// to simplify deploy config. It may be introduced in the future.
resourceConfig: {
maxResourceLimit: 20_000_000,
elasticityMultiplier: 10,
baseFeeMaxChangeDenominator: 8,
minimumBaseFee: ethers.utils.parseUnits('1', 'gwei'),
systemTxMaxGas: 1_000_000,
maximumBaseFee: BigNumber.from(
'0xffffffffffffffffffffffffffffffff'
).toString(),
},
resourceConfig: defaultResourceConfig,
},
}
......
import { ethers } from 'ethers'
/**
* Predeploys are Solidity contracts that are injected into the initial L2 state and provide
* various useful functions.
......@@ -26,3 +28,14 @@ export const predeploys = {
BaseFeeVault: '0x4200000000000000000000000000000000000019',
L1FeeVault: '0x420000000000000000000000000000000000001a',
}
const uint128Max = ethers.BigNumber.from('0xffffffffffffffffffffffffffffffff')
export const defaultResourceConfig = {
maxResourceLimit: 20_000_000,
elasticityMultiplier: 10,
baseFeeMaxChangeDenominator: 8,
minimumBaseFee: ethers.utils.parseUnits('1', 'gwei'),
systemTxMaxGas: 1_000_000,
maximumBaseFee: uint128Max,
}
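The deploy script 011-SystemConfigImpl shown earlier derives its genesis gas-limit check from these constants: the L2 genesis block must leave room for the system transaction plus the full resource-metered allotment. A minimal sketch of that arithmetic, assuming the same defaultResourceConfig values:

import { BigNumber } from 'ethers'
import { defaultResourceConfig } from './constants'

// systemTxMaxGas + maxResourceLimit = 1_000_000 + 20_000_000 = 21_000_000
const l2GasLimitLowerBound = BigNumber.from(
  defaultResourceConfig.systemTxMaxGas + defaultResourceConfig.maxResourceLimit
)

// A genesis gas limit below this bound (e.g. 15_000_000) is rejected by the
// deploy script with 'L2 genesis block gas limit must be at least ...'.
console.log(l2GasLimitLowerBound.toString()) // '21000000'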
ignores: [
"@babel/eslint-parser",
"@types/level",
"@typescript-eslint/parser",
"eslint-plugin-import",
"eslint-plugin-unicorn",
"eslint-plugin-jsdoc",
"eslint-plugin-prefer-arrow",
"eslint-plugin-react",
"@typescript-eslint/eslint-plugin",
"eslint-config-prettier",
"eslint-plugin-prettier",
"chai"
]
# General options
DATA_TRANSPORT_LAYER__NODE_ENV=development
# Leave blank during local development
DATA_TRANSPORT_LAYER__ETH_NETWORK_NAME=
DATA_TRANSPORT_LAYER__DB_PATH=./db
DATA_TRANSPORT_LAYER__ADDRESS_MANAGER=
DATA_TRANSPORT_LAYER__POLLING_INTERVAL=5000
DATA_TRANSPORT_LAYER__DANGEROUSLY_CATCH_ALL_ERRORS=true
DATA_TRANSPORT_LAYER__CONFIRMATIONS=12
# Server options
DATA_TRANSPORT_LAYER__SERVER_HOSTNAME=localhost
DATA_TRANSPORT_LAYER__SERVER_PORT=7878
# Set to "true" if you want to sync confirmed transactions from L1 (Ethereum).
# You probably want to set this to "true".
DATA_TRANSPORT_LAYER__SYNC_FROM_L1=true
DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT=
DATA_TRANSPORT_LAYER__LOGS_PER_POLLING_INTERVAL=2000
# Set to "true" if you want to sync unconfirmed transactions from a sequencer.
# Make sure to fill in the below values if you intend to do so.
DATA_TRANSPORT_LAYER__SYNC_FROM_L2=false
DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT=
DATA_TRANSPORT_LAYER__TRANSACTIONS_PER_POLLING_INTERVAL=1000
DATA_TRANSPORT_LAYER__L2_CHAIN_ID=69
DATA_TRANSPORT_LAYER__LEGACY_SEQUENCER_COMPATIBILITY=false
# Monitoring
# Leave the SENTRY_DSN variable unset during local development
DATA_TRANSPORT_LAYER__USE_SENTRY=
DATA_TRANSPORT_LAYER__SENTRY_DSN=
DATA_TRANSPORT_LAYER__SENTRY_TRACE_RATE=
DATA_TRANSPORT_LAYER__ENABLE_METRICS=
module.exports = {
extends: '../../.eslintrc.js',
}
module.exports = {
...require('../../.prettierrc.js'),
};
\ No newline at end of file
(The MIT License)
Copyright 2020-2021 Optimism
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{
"private": true,
"name": "@eth-optimism/data-transport-layer",
"version": "0.5.56",
"description": "[Optimism] Service for shuttling data from L1 into L2",
"main": "dist/index",
"types": "dist/index",
"files": [
"dist/index"
],
"scripts": {
"clean": "rimraf ./dist ./tsconfig.tsbuildinfo",
"clean:db": "rimraf ./db",
"lint": "yarn run lint:fix && yarn run lint:check",
"lint:fix": "yarn lint:check --fix",
"lint:check": "eslint . --max-warnings=0",
"start": "ts-node ./src/services/run.ts",
"start:local": "ts-node ./src/services/run.ts | pino-pretty",
"test": "hardhat --config test/config/hardhat.config.ts test",
"test:coverage": "nyc hardhat --config test/config/hardhat.config.ts test && nyc merge .nyc_output coverage.json",
"build": "tsc -p tsconfig.json",
"pre-commit": "lint-staged"
},
"keywords": [
"optimism",
"ethereum",
"data",
"transport",
"layer"
],
"homepage": "https://github.com/ethereum-optimism/optimism/tree/develop/packages/data-transport-layer#readme",
"license": "MIT",
"author": "Optimism PBC",
"repository": {
"type": "git",
"url": "https://github.com/ethereum-optimism/optimism.git"
},
"dependencies": {
"@eth-optimism/common-ts": "0.8.1",
"@eth-optimism/contracts": "0.6.0",
"@eth-optimism/core-utils": "0.12.0",
"@ethersproject/providers": "^5.7.0",
"@ethersproject/transactions": "^5.7.0",
"@sentry/node": "^6.3.1",
"@sentry/tracing": "^6.3.1",
"@types/express": "^4.17.12",
"axios": "^0.21.1",
"bcfg": "^0.1.6",
"bfj": "^7.0.2",
"cors": "^2.8.5",
"dotenv": "^10.0.0",
"ethers": "^5.7.0",
"express": "^4.17.1",
"express-prom-bundle": "^6.3.6",
"level6": "npm:level@^6.0.1",
"levelup": "^4.4.0"
},
"devDependencies": {
"@types/cors": "^2.8.9",
"@types/levelup": "^4.3.0",
"@types/level": "^6.0.1",
"bfj": "^7.0.2",
"chai-as-promised": "^7.1.1",
"hardhat": "^2.9.6",
"mocha": "^8.4.0",
"pino-pretty": "^4.7.1",
"prettier": "^2.8.0",
"prom-client": "^13.1.0",
"rimraf": "^3.0.2",
"ts-node": "^10.9.1"
}
}
export * from './chain-constants'
export * from './patch-contexts'
export const PATCH_CONTEXTS = {
10: {
2817218: 1643139411,
2817287: 1643139718,
2817898: 1643140952,
2818512: 1643141859,
2818984: 1643142762,
2819864: 1643144275,
2820902: 1643146079,
2821157: 1643146389,
2821170: 1643146389,
2821339: 1643146689,
2821772: 1643147604,
2821814: 1643147909,
2821952: 1643147909,
2822262: 1643148824,
2822342: 1643149130,
2822425: 1643149130,
2822602: 1643149430,
2822742: 1643149733,
2822987: 1643150660,
2822999: 1643150660,
2823039: 1643150964,
2823046: 1643150964,
2823055: 1643150964,
2823096: 1643151269,
2823205: 1643151572,
2823260: 1643151572,
2823306: 1643151572,
2823322: 1643151572,
2823413: 1643151872,
2823419: 1643151872,
2823460: 1643151872,
2823561: 1643152174,
2823592: 1643152174,
2824036: 1643152774,
2824050: 1643153075,
2824107: 1643153075,
2824247: 1643153376,
2832642: 1643173416,
2835330: 1643181396,
2838173: 1643188371,
2838174: 1643188371,
2838175: 1643188371,
2840388: 1643192601,
2844171: 1643202366,
2845370: 1643204181,
2845931: 1643205096,
2846484: 1643205696,
2894118: 1643281866,
2894119: 1643281866,
2959506: 1643399826,
2967959: 1643419611,
2971530: 1643432181,
2974571: 1643443881,
2981176: 1643465226,
2984205: 1643470986,
2995760: 1643498166,
2996847: 1643501211,
2997086: 1643501811,
2997087: 1643501811,
2997569: 1643503026,
2998970: 1643506101,
3000041: 1643510376,
3000042: 1643510376,
3000973: 1643514306,
3001008: 1643514606,
3001009: 1643514606,
3002529: 1643520081,
3008446: 1643541501,
3009141: 1643543016,
3012287: 1643551521,
3012348: 1643551821,
3022052: 1643574336,
3042815: 1643624616,
3043000: 1643625516,
3060328: 1643656446,
3060471: 1643656746,
3064982: 1643667996,
3070655: 1643683461,
},
}
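Judging by the shape of the data, PATCH_CONTEXTS maps an L2 chain ID (10 = Optimism mainnet) to per-index unix-timestamp overrides. A hypothetical lookup helper (patchTimestamp is illustrative only; the consuming code is elsewhere in the DTL):

import { PATCH_CONTEXTS } from './patch-contexts'

const contexts = PATCH_CONTEXTS as Record<number, Record<number, number>>

// Return the patched timestamp for a chain/index pair, or the original value.
const patchTimestamp = (
  chainId: number,
  index: number,
  timestamp: number
): number => contexts[chainId]?.[index] ?? timestamp

patchTimestamp(10, 2817218, 0) // 1643139411 (override applied)
patchTimestamp(10, 1, 12345)   // 12345 (no override)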
/* Imports: External */
import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers'
export class SimpleDB {
constructor(public db: LevelUp) {}
public async get<TEntry>(key: string, index: number): Promise<TEntry | null> {
try {
// TODO: Better checks here.
return JSON.parse(await this.db.get(this._makeKey(key, index)))
} catch (err) {
return null
}
}
public async range<TEntry>(
key: string,
startIndex: number,
endIndex: number
): Promise<TEntry[] | []> {
try {
return new Promise<any[]>((resolve) => {
const entries: any[] = []
this.db
.createValueStream({
gte: this._makeKey(key, startIndex),
lt: this._makeKey(key, endIndex),
})
.on('data', (transaction: string) => {
entries.push(JSON.parse(transaction))
})
.on('error', () => {
resolve(null)
})
.on('close', () => {
// TODO: Close vs end? Need to double check later.
resolve(entries)
})
.on('end', () => {
resolve(entries)
})
})
} catch (err) {
return []
}
}
public async put<TEntry>(
entries: {
key: string
index: number
value: TEntry
}[]
): Promise<void> {
return this.db.batch(
entries.map((entry) => {
return {
type: 'put',
key: this._makeKey(entry.key, entry.index),
value: JSON.stringify(entry.value),
}
})
)
}
private _makeKey(key: string, index: number): string {
// prettier-ignore
return `${key}:${BigNumber.from(index).toString().padStart(32, '0')}`
}
}
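The zero-padded index in _makeKey is what makes the createValueStream range query above behave: lexicographic order on the composite keys matches numeric order on the indices. A standalone sketch of the same scheme:

import { BigNumber } from 'ethers'

const makeKey = (key: string, index: number): string =>
  `${key}:${BigNumber.from(index).toString().padStart(32, '0')}`

makeKey('transaction', 9)  // 'transaction:00000000000000000000000000000009'
makeKey('transaction', 10) // 'transaction:00000000000000000000000000000010'
// Without the padding, '10' sorts before '9' as a string, and the
// gte/lt range scan over the LevelDB keyspace would return wrong slices.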
export type EventName =
| 'TransactionEnqueued'
| 'SequencerBatchAppended'
| 'StateBatchAppended'
| 'SequencerBatchAppendedTransaction'
export class MissingElementError extends Error {
constructor(public name: EventName) {
super(`missing event: ${name}`)
}
}
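The ingestion services treat MissingElementError as a signal that an upstream RPC provider dropped an event. A hypothetical caller sketch (the actual handling lives in the collapsed l1-ingestion diff; resyncFromLastKnownGood is illustrative only):

import { MissingElementError } from './errors'

// Hypothetical recovery helper: rewind the sync point and try again.
declare function resyncFromLastKnownGood(eventName: string): Promise<void>

const safeStore = async (store: () => Promise<void>): Promise<void> => {
  try {
    await store()
  } catch (err) {
    if (err instanceof MissingElementError) {
      // Gap detected (err.name identifies the event type): rewind and
      // re-sync rather than persisting an inconsistent view of the chain.
      await resyncFromLastKnownGood(err.name)
    } else {
      throw err
    }
  }
}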
/* Imports: External */
import { BigNumber, ethers, constants } from 'ethers'
import { serialize, Transaction } from '@ethersproject/transactions'
import { getContractFactory } from '@eth-optimism/contracts'
import {
toHexString,
toRpcHexString,
BatchType,
SequencerBatch,
} from '@eth-optimism/core-utils'
import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/contracts/L1/rollup/CanonicalTransactionChain'
/* Imports: Internal */
import { MissingElementError } from './errors'
import {
DecodedSequencerBatchTransaction,
SequencerBatchAppendedExtraData,
SequencerBatchAppendedParsedEvent,
TransactionBatchEntry,
TransactionEntry,
EventHandlerSet,
} from '../../../types'
import { parseSignatureVParam } from '../../../utils'
export const handleEventsSequencerBatchAppended: EventHandlerSet<
SequencerBatchAppendedEvent,
SequencerBatchAppendedExtraData,
SequencerBatchAppendedParsedEvent
> = {
getExtraData: async (event, l1RpcProvider) => {
const l1Transaction = await event.getTransaction()
const eventBlock = await event.getBlock()
// TODO: We need to update our events so that we actually have enough information to parse this
// batch without having to pull out this extra event. In the meantime, we need to find this
// "TransactionBatchAppended" event to get the rest of the data.
const CanonicalTransactionChain = getContractFactory(
'CanonicalTransactionChain'
)
.attach(event.address)
.connect(l1RpcProvider)
const batchSubmissionEvent = (
await CanonicalTransactionChain.queryFilter(
CanonicalTransactionChain.filters.TransactionBatchAppended(),
eventBlock.number,
eventBlock.number
)
).find((foundEvent: ethers.Event) => {
// We might have more than one event in this block, so we specifically want to find a
// "TransactonBatchAppended" event emitted immediately before the event in question.
return (
foundEvent.transactionHash === event.transactionHash &&
foundEvent.logIndex === event.logIndex - 1
)
})
if (!batchSubmissionEvent) {
throw new Error(
`Well, this really shouldn't happen. A SequencerBatchAppended event doesn't have a corresponding TransactionBatchAppended event.`
)
}
return {
timestamp: eventBlock.timestamp,
blockNumber: eventBlock.number,
submitter: l1Transaction.from,
l1TransactionHash: l1Transaction.hash,
l1TransactionData: l1Transaction.data,
prevTotalElements: batchSubmissionEvent.args._prevTotalElements,
batchIndex: batchSubmissionEvent.args._batchIndex,
batchSize: batchSubmissionEvent.args._batchSize,
batchRoot: batchSubmissionEvent.args._batchRoot,
batchExtraData: batchSubmissionEvent.args._extraData,
}
},
parseEvent: (event, extraData, l2ChainId) => {
const transactionEntries: TransactionEntry[] = []
// 12 bytes as a '0x'-prefixed hex string: 12 * 2 + 2 = 26 characters
if (extraData.l1TransactionData.length < 26) {
throw new Error(
`Block ${extraData.blockNumber} transaction data is too small: ${extraData.l1TransactionData.length}`
)
}
// TODO: typings not working?
const decoded = (SequencerBatch as any).fromHex(extraData.l1TransactionData)
// Keep track of the CTC index
let transactionIndex = 0
// Keep track of the number of deposits
let enqueuedCount = 0
// Keep track of the tx index in the current batch
let index = 0
for (const context of decoded.contexts) {
for (let j = 0; j < context.numSequencedTransactions; j++) {
const buf = decoded.transactions[index]
if (!buf) {
throw new Error(
`Invalid batch context, tx count: ${decoded.transactions.length}, attempting to parse ${index}`
)
}
const tx = buf.toTransaction()
transactionEntries.push({
index: extraData.prevTotalElements
.add(BigNumber.from(transactionIndex))
.toNumber(),
batchIndex: extraData.batchIndex.toNumber(),
blockNumber: BigNumber.from(context.blockNumber).toNumber(),
timestamp: BigNumber.from(context.timestamp).toNumber(),
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: null,
data: serialize(
{
nonce: tx.nonce,
gasPrice: tx.gasPrice,
gasLimit: tx.gasLimit,
to: tx.to,
value: tx.value,
data: tx.data,
},
{
v: tx.v,
r: tx.r,
s: tx.s,
}
),
queueOrigin: 'sequencer',
value: toRpcHexString(tx.value),
queueIndex: null,
decoded: mapSequencerTransaction(tx, l2ChainId),
confirmed: true,
})
transactionIndex++
index++
}
for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
const queueIndex = event.args._startingQueueIndex.add(
BigNumber.from(enqueuedCount)
)
// Okay, so. Since events are processed in parallel, we don't know if the Enqueue
// event associated with this queue element has already been processed. So we'll ask
// the api to fetch that data for itself later on and we use fake values for some
// fields. The real TODO here is to make sure we fix this data structure to avoid ugly
// "dummy" fields.
transactionEntries.push({
index: extraData.prevTotalElements
.add(BigNumber.from(transactionIndex))
.toNumber(),
batchIndex: extraData.batchIndex.toNumber(),
blockNumber: BigNumber.from(0).toNumber(),
timestamp: context.timestamp,
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: constants.AddressZero,
data: '0x',
queueOrigin: 'l1',
value: '0x0',
queueIndex: queueIndex.toNumber(),
decoded: null,
confirmed: true,
})
enqueuedCount++
transactionIndex++
}
}
const transactionBatchEntry: TransactionBatchEntry = {
index: extraData.batchIndex.toNumber(),
root: extraData.batchRoot,
size: extraData.batchSize.toNumber(),
prevTotalElements: extraData.prevTotalElements.toNumber(),
extraData: extraData.batchExtraData,
blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
l1TransactionHash: extraData.l1TransactionHash,
type: BatchType[decoded.type],
}
return {
transactionBatchEntry,
transactionEntries,
}
},
storeEvent: async (entry, db) => {
// Defend against situations where we missed an event because the RPC provider
// (infura/alchemy/whatever) is missing an event.
if (entry.transactionBatchEntry.index > 0) {
const prevTransactionBatchEntry = await db.getTransactionBatchByIndex(
entry.transactionBatchEntry.index - 1
)
// We should *always* have a previous transaction batch here.
if (prevTransactionBatchEntry === null) {
throw new MissingElementError('SequencerBatchAppended')
}
}
// Same consistency checks but for transaction entries.
if (
entry.transactionEntries.length > 0 &&
entry.transactionEntries[0].index > 0
) {
const prevTransactionEntry = await db.getTransactionByIndex(
entry.transactionEntries[0].index - 1
)
// We should *always* have a previous transaction here.
if (prevTransactionEntry === null) {
throw new MissingElementError('SequencerBatchAppendedTransaction')
}
}
await db.putTransactionEntries(entry.transactionEntries)
// Add an additional field to the enqueued transactions in the database
// if they have already been confirmed
for (const transactionEntry of entry.transactionEntries) {
if (transactionEntry.queueOrigin === 'l1') {
await db.putTransactionIndexByQueueIndex(
transactionEntry.queueIndex,
transactionEntry.index
)
}
}
await db.putTransactionBatchEntries([entry.transactionBatchEntry])
},
}
const mapSequencerTransaction = (
tx: Transaction,
l2ChainId: number
): DecodedSequencerBatchTransaction => {
return {
nonce: BigNumber.from(tx.nonce).toString(),
gasPrice: BigNumber.from(tx.gasPrice).toString(),
gasLimit: BigNumber.from(tx.gasLimit).toString(),
value: toRpcHexString(tx.value),
target: tx.to ? toHexString(tx.to) : null,
data: toHexString(tx.data),
sig: {
v: parseSignatureVParam(tx.v, l2ChainId),
r: toHexString(tx.r),
s: toHexString(tx.s),
},
}
}
/* Imports: External */
import { StateBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/contracts/L1/rollup/StateCommitmentChain'
import { getContractFactory } from '@eth-optimism/contracts'
import { BigNumber } from 'ethers'
/* Imports: Internal */
import { MissingElementError } from './errors'
import {
StateRootBatchEntry,
StateBatchAppendedExtraData,
StateBatchAppendedParsedEvent,
StateRootEntry,
EventHandlerSet,
} from '../../../types'
export const handleEventsStateBatchAppended: EventHandlerSet<
StateBatchAppendedEvent,
StateBatchAppendedExtraData,
StateBatchAppendedParsedEvent
> = {
getExtraData: async (event) => {
const eventBlock = await event.getBlock()
const l1Transaction = await event.getTransaction()
return {
timestamp: eventBlock.timestamp,
blockNumber: eventBlock.number,
submitter: l1Transaction.from,
l1TransactionHash: l1Transaction.hash,
l1TransactionData: l1Transaction.data,
}
},
parseEvent: (event, extraData) => {
const stateRoots = getContractFactory(
'StateCommitmentChain'
).interface.decodeFunctionData(
'appendStateBatch',
extraData.l1TransactionData
)[0]
const stateRootEntries: StateRootEntry[] = []
for (let i = 0; i < stateRoots.length; i++) {
stateRootEntries.push({
index: event.args._prevTotalElements.add(BigNumber.from(i)).toNumber(),
batchIndex: event.args._batchIndex.toNumber(),
value: stateRoots[i],
confirmed: true,
})
}
// Using .toNumber() here and in other places because I want to move everything to use
// BigNumber + hex, but that'll take a lot of work. This makes it easier in the future.
const stateRootBatchEntry: StateRootBatchEntry = {
index: event.args._batchIndex.toNumber(),
blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
size: event.args._batchSize.toNumber(),
root: event.args._batchRoot,
prevTotalElements: event.args._prevTotalElements.toNumber(),
extraData: event.args._extraData,
l1TransactionHash: extraData.l1TransactionHash,
type: 'LEGACY', // There is currently only 1 state root batch type
}
return {
stateRootBatchEntry,
stateRootEntries,
}
},
storeEvent: async (entry, db) => {
// Defend against situations where we missed an event because the RPC provider
// (infura/alchemy/whatever) is missing an event.
if (entry.stateRootBatchEntry.index > 0) {
const prevStateRootBatchEntry = await db.getStateRootBatchByIndex(
entry.stateRootBatchEntry.index - 1
)
// We should *always* have a previous batch entry here.
if (prevStateRootBatchEntry === null) {
throw new MissingElementError('StateBatchAppended')
}
}
await db.putStateRootBatchEntries([entry.stateRootBatchEntry])
await db.putStateRootEntries(entry.stateRootEntries)
},
}
/* Imports: External */
import { BigNumber } from 'ethers'
import { TransactionEnqueuedEvent } from '@eth-optimism/contracts/dist/types/contracts/L1/rollup/CanonicalTransactionChain'
/* Imports: Internal */
import { MissingElementError } from './errors'
import { EnqueueEntry, EventHandlerSet } from '../../../types'
export const handleEventsTransactionEnqueued: EventHandlerSet<
TransactionEnqueuedEvent,
null,
EnqueueEntry
> = {
getExtraData: async () => {
return null
},
parseEvent: (event) => {
return {
index: event.args._queueIndex.toNumber(),
target: event.args._target,
data: event.args._data,
gasLimit: event.args._gasLimit.toString(),
origin: event.args._l1TxOrigin,
blockNumber: BigNumber.from(event.blockNumber).toNumber(),
timestamp: event.args._timestamp.toNumber(),
ctcIndex: null,
}
},
storeEvent: async (entry, db) => {
// Defend against situations where we missed an event because the RPC provider
// (infura/alchemy/whatever) is missing an event.
if (entry.index > 0) {
const prevEnqueueEntry = await db.getEnqueueByIndex(entry.index - 1)
// We should *always* have a previous enqueue entry here.
if (prevEnqueueEntry === null) {
throw new MissingElementError('TransactionEnqueued')
}
}
await db.putEnqueueEntries([entry])
},
}
export type EventName = 'SequencerTransaction'
export class MissingElementError extends Error {
constructor(public name: EventName) {
super(`missing event: ${name}`)
}
}
/* Imports: External */
import { BigNumber, ethers } from 'ethers'
import { serialize } from '@ethersproject/transactions'
import { padHexString } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { TransportDB } from '../../../db/transport-db'
import {
DecodedSequencerBatchTransaction,
StateRootEntry,
TransactionEntry,
} from '../../../types'
import { parseSignatureVParam } from '../../../utils'
import { MissingElementError } from './errors'
export const handleSequencerBlock = {
parseBlock: async (
block: any,
chainId: number
): Promise<{
transactionEntry: TransactionEntry
stateRootEntry: StateRootEntry
}> => {
const transaction = block.transactions[0]
const transactionIndex =
BigNumber.from(transaction.blockNumber).toNumber() - 1
// We make the assumption that you don't need to sync the genesis block
if (transactionIndex < 0) {
throw new Error('should not happen, attempted to sync genesis block')
}
let transactionEntry: Partial<TransactionEntry> = {
// Legacy support.
index: transactionIndex,
value: transaction.value,
batchIndex: null,
blockNumber: BigNumber.from(transaction.l1BlockNumber).toNumber(),
timestamp: BigNumber.from(transaction.l1Timestamp).toNumber(),
queueOrigin: transaction.queueOrigin,
confirmed: false,
}
if (transaction.queueOrigin === 'sequencer') {
const decodedTransaction: DecodedSequencerBatchTransaction = {
sig: {
v: parseSignatureVParam(transaction.v, chainId),
r: padHexString(transaction.r, 32),
s: padHexString(transaction.s, 32),
},
value: transaction.value,
gasLimit: BigNumber.from(transaction.gas).toString(),
gasPrice: BigNumber.from(transaction.gasPrice).toString(),
nonce: BigNumber.from(transaction.nonce).toString(),
target: transaction.to,
data: transaction.input,
}
transactionEntry = {
...transactionEntry,
gasLimit: BigNumber.from(0).toString(),
target: ethers.constants.AddressZero,
origin: null,
data: serialize(
{
value: transaction.value,
gasLimit: transaction.gas,
gasPrice: transaction.gasPrice,
nonce: transaction.nonce,
to: transaction.to,
data: transaction.input,
chainId,
},
{
v: BigNumber.from(transaction.v).toNumber(),
r: padHexString(transaction.r, 32),
s: padHexString(transaction.s, 32),
}
),
decoded: decodedTransaction,
queueIndex: null,
}
} else {
transactionEntry = {
...transactionEntry,
gasLimit: BigNumber.from(transaction.gas).toString(),
target: ethers.utils.getAddress(transaction.to),
origin: ethers.utils.getAddress(transaction.l1TxOrigin),
data: transaction.input,
decoded: null,
queueIndex:
transaction.queueIndex === null ||
transaction.queueIndex === undefined
? BigNumber.from(transaction.nonce).toNumber()
: BigNumber.from(transaction.queueIndex).toNumber(),
}
}
const stateRootEntry: StateRootEntry = {
index: transactionIndex,
batchIndex: null,
value: block.stateRoot,
confirmed: false,
}
return {
transactionEntry: transactionEntry as TransactionEntry, // Not the cleanest thing in the world. Could be improved.
stateRootEntry,
}
},
storeBlock: async (
entry: {
transactionEntry: TransactionEntry
stateRootEntry: StateRootEntry
},
db: TransportDB
): Promise<void> => {
if (entry.transactionEntry.index > 0) {
const prevTransactionEntry = await db.getUnconfirmedTransactionByIndex(
entry.transactionEntry.index - 1
)
// We should *always* have a previous transaction here.
if (prevTransactionEntry === null) {
throw new MissingElementError('SequencerTransaction')
}
}
// Having separate indices for confirmed/unconfirmed means we never have to worry about
// accidentally overwriting a confirmed transaction with an unconfirmed one. Unconfirmed
// transactions are purely extra information.
await db.putUnconfirmedTransactionEntries([entry.transactionEntry])
await db.putUnconfirmedStateRootEntries([entry.stateRootEntry])
},
}
/* Imports: External */
import { BaseService, LegacyMetrics } from '@eth-optimism/common-ts'
import { LevelUp } from 'levelup'
import level from 'level6'
import { Counter } from 'prom-client'
/* Imports: Internal */
import { L1IngestionService } from '../l1-ingestion/service'
import { L1TransportServer } from '../server/service'
import { validators } from '../../utils'
import { L2IngestionService } from '../l2-ingestion/service'
import { BSS_HF1_INDEX } from '../../config'
export interface L1DataTransportServiceOptions {
nodeEnv: string
ethNetworkName?: 'mainnet' | 'kovan' | 'goerli'
release: string
addressManager: string
confirmations: number
dangerouslyCatchAllErrors?: boolean
hostname: string
l1RpcProvider: string
l1RpcProviderUser?: string
l1RpcProviderPassword?: string
l2ChainId: number
l2RpcProvider: string
l2RpcProviderUser?: string
l2RpcProviderPassword?: string
l1SyncShutoffBlock?: number
metrics?: LegacyMetrics
dbPath: string
logsPerPollingInterval: number
pollingInterval: number
port: number
syncFromL1?: boolean
syncFromL2?: boolean
transactionsPerPollingInterval: number
legacySequencerCompatibility: boolean
useSentry?: boolean
sentryDsn?: string
sentryTraceRate?: number
defaultBackend: string
l1GasPriceBackend: string
l1StartHeight?: number
}
const optionSettings = {
syncFromL1: {
default: true,
validate: validators.isBoolean,
},
syncFromL2: {
default: false,
validate: validators.isBoolean,
},
}
// prettier-ignore
export class L1DataTransportService extends BaseService<L1DataTransportServiceOptions> {
constructor(options: L1DataTransportServiceOptions) {
super('L1_Data_Transport_Service', options, optionSettings)
}
private state: {
db: LevelUp
l1IngestionService?: L1IngestionService
l2IngestionService?: L2IngestionService
l1TransportServer: L1TransportServer
metrics: LegacyMetrics
failureCounter: Counter<string>
} = {} as any
protected async _init(): Promise<void> {
this.logger.info('Initializing L1 Data Transport Service...')
this.state.db = level(this.options.dbPath)
await this.state.db.open()
// BSS HF1 activates at block 0 if not specified.
const bssHf1Index = BSS_HF1_INDEX[this.options.l2ChainId] || 0
this.logger.info(`L2 chain ID is: ${this.options.l2ChainId}`)
this.logger.info(`BSS HF1 will activate at: ${bssHf1Index}`)
this.state.metrics = new LegacyMetrics({
labels: {
environment: this.options.nodeEnv,
network: this.options.ethNetworkName,
release: this.options.release,
service: this.name,
}
})
this.state.metrics.client.collectDefaultMetrics({
prefix: 'data_transport_layer_'
})
this.state.failureCounter = new this.state.metrics.client.Counter({
name: 'data_transport_layer_main_service_failures',
help: 'Counts the number of times that the main service fails',
registers: [this.state.metrics.registry],
})
this.state.l1TransportServer = new L1TransportServer({
...this.options,
metrics: this.state.metrics,
db: this.state.db,
})
// Optionally enable sync from L1.
if (this.options.syncFromL1) {
this.state.l1IngestionService = new L1IngestionService({
...this.options,
metrics: this.state.metrics,
db: this.state.db,
})
}
// Optionally enable sync from L2.
if (this.options.syncFromL2) {
this.state.l2IngestionService = new L2IngestionService({
...(this.options as any), // TODO: Correct thing to do here is to assert this type.
metrics: this.state.metrics,
db: this.state.db,
})
}
await this.state.l1TransportServer.init()
if (this.options.syncFromL1) {
await this.state.l1IngestionService.init()
}
if (this.options.syncFromL2) {
await this.state.l2IngestionService.init()
}
}
protected async _start(): Promise<void> {
try {
await Promise.all([
this.state.l1TransportServer.start(),
this.options.syncFromL1 ? this.state.l1IngestionService.start() : null,
this.options.syncFromL2 ? this.state.l2IngestionService.start() : null,
])
} catch (e) {
this.state.failureCounter.inc()
throw e
}
}
protected async _stop(): Promise<void> {
try {
await Promise.all([
this.state.l1TransportServer.stop(),
this.options.syncFromL1 ? this.state.l1IngestionService.stop() : null,
this.options.syncFromL2 ? this.state.l2IngestionService.stop() : null,
])
await this.state.db.close()
} catch (e) {
this.state.failureCounter.inc()
throw e
}
}
}
export * from './api-types'
export * from './database-types'
export * from './event-handler-types'
/* Imports: External */
import { ethers } from 'ethers'
export const parseSignatureVParam = (
v: number | ethers.BigNumber | string,
chainId: number
): number => {
v = ethers.BigNumber.from(v).toNumber()
// Handle unprotected transactions
if (v === 27 || v === 28) {
return v
}
// Handle EIP155 transactions
return v - 2 * chainId - 35
}
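Worked through for Optimism mainnet (chainId = 10): an EIP-155 protected v of 2 * 10 + 35 = 55 or 56 recovers to recovery id 0 or 1, while pre-EIP-155 values pass through unchanged:

parseSignatureVParam(55, 10) // 55 - 2*10 - 35 = 0
parseSignatureVParam(56, 10) // 56 - 2*10 - 35 = 1
parseSignatureVParam(27, 10) // 27 (unprotected transaction, returned as-is)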
export * from './contracts'
export * from './validation'
export * from './eth-tx'
import { HardhatUserConfig } from 'hardhat/config'
const config: HardhatUserConfig = {
// All paths relative to ** this file **.
paths: {
tests: '../../test',
cache: '../temp/cache',
artifacts: '../temp/artifacts',
},
}
export default config