Commit 08dea724 authored by Mark Tyneway, committed by GitHub

Merge pull request #2396 from ethereum-optimism/develop

Develop -> Master
parents 0ac233e7 3e5b2092
---
'@eth-optimism/data-transport-layer': patch
---
Patch for Kovan DTL halting issue
---
'@eth-optimism/l2geth': patch
---
Skip TestWSAttachWelcome
---
'@eth-optimism/common-ts': patch
---
Adds hard stop to BaseServiceV2 when multiple exit signals are received
---
'@eth-optimism/l2geth': patch
---
Skip some geth console tests that flake in CI
---
'@eth-optimism/gas-oracle': patch
---
Fixes a bug that would cause the service to crash on startup if the RPC URLs were not immediately available
---
'@eth-optimism/integration-tests': patch
---
Update the default L2 chain ID
---
'@eth-optimism/data-transport-layer': patch
---
Add new metrics to the data-transport-layer
---
'@eth-optimism/integration-tests': patch
'@eth-optimism/common-ts': patch
'@eth-optimism/contracts': patch
'@eth-optimism/core-utils': patch
'@eth-optimism/data-transport-layer': patch
'@eth-optimism/message-relayer': patch
'@eth-optimism/replica-healthcheck': patch
'@eth-optimism/sdk': patch
---
Update Dockerfile to use Alpine
---
'@eth-optimism/common-ts': patch
---
Have BaseServiceV2 throw when options are undefined
---
'@eth-optimism/common-ts': patch
---
Adds the jsonRpcProvider validator as an input validator
---
'@eth-optimism/l2geth': patch
---
Adds a flag for changing the genesis fetch timeout
---
'@eth-optimism/contracts': patch
'@eth-optimism/data-transport-layer': patch
'@eth-optimism/integration-tests': patch
'@eth-optimism/message-relayer': patch
'@eth-optimism/sdk': patch
---
Bump to hardhat@2.9.1
---
'@eth-optimism/hardhat-node': patch
---
Bump to hardhat@2.9.1
......@@ -208,23 +208,23 @@ jobs:
steps:
- restore_cache:
keys:
- v1-source-{{ .Branch }}-{{ .Revision }}
- v1-source-{{ .Branch }}
- v2-cache-source-{{ .Branch }}-{{ .Revision }}
- v2-cache-source-{{ .Branch }}
- checkout
- save_cache:
key: v1-source-{{ .Branch }}-{{ .Revision }}
key: v2-cache-source-{{ .Branch }}-{{ .Revision }}
paths:
- ".git"
- restore_cache:
keys:
- v1-yarn-install-{{ checksum "yarn.lock" }}
- v1-yarn-install
- v2-cache-yarn-install-{{ checksum "yarn.lock" }}
- v2-cache-yarn-install
- run:
name: Install dependencies
command: yarn --frozen-lockfile
- save_cache:
key: v1-yarn-install-{{ checksum "yarn.lock" }}
paths:
key: v2-cache-yarn-install-{{ checksum "yarn.lock" }}
paths:
- node_modules
- packages/common-ts/node_modules
- packages/contracts/node_modules
......@@ -238,7 +238,7 @@ jobs:
name: Build monorepo
command: yarn build
- save_cache:
key: v1-yarn-build-{{ .Revision }}
key: v2-cache-yarn-build-{{ .Revision }}
paths:
- "."
......@@ -248,7 +248,7 @@ jobs:
steps:
- restore_cache:
keys:
- v1-yarn-build-{{ .Revision }}
- v2-cache-yarn-build-{{ .Revision }}
- checkout
- run:
name: Run Slither
......@@ -262,12 +262,16 @@ jobs:
steps:
- restore_cache:
keys:
- v1-yarn-build-{{ .Revision }}
- v2-cache-yarn-build-{{ .Revision }}
- checkout
- run:
name: Lint
command: yarn lint:check
working_directory: packages/contracts
- run:
name: Check deployment docs
command: yarn autogen:markdown && git diff --exit-code
working_directory: packages/contracts
- run:
name: Slither
command: yarn test:slither
......@@ -283,7 +287,7 @@ jobs:
steps:
- restore_cache:
keys:
- v1-yarn-build-{{ .Revision }}
- v2-cache-yarn-build-{{ .Revision }}
- checkout
- run:
name: Test
......@@ -306,7 +310,7 @@ jobs:
steps:
- restore_cache:
keys:
- v1-yarn-build-{{ .Revision }}
- v2-cache-yarn-build-{{ .Revision }}
- checkout
# Note: The below needs to be manually configured whenever we
# add a new package to CI.
......@@ -362,7 +366,7 @@ jobs:
name: Bring up the stack
command: |
docker-compose build --progress=plain
docker-compose up -d --scale replica-healthcheck=1
docker-compose up -d --scale replica_healthcheck=1
working_directory: ops
- run:
name: Wait for sequencer
......@@ -385,7 +389,7 @@ jobs:
steps:
- restore_cache:
keys:
- v1-yarn-build-{{ .Revision }}
- v2-cache-yarn-build-{{ .Revision }}
- checkout
- run:
name: Lint
......
go/bss-core @cfromknecht @tynes
go/batch-submitter @cfromknecht @tynes
go/gas-oracle @tynes
go/l2geth-exporter @optimisticben @mslipper
go/op-exporter @optimisticben @mslipper
go/proxyd @mslipper @inphi
go/teleportr @mslipper @cfromknecht
integration-tests/ @tynes @mslipper
packages/core-utils @smartcontracts @tynes
packages/common-ts/ @smartcontracts
packages/message-relayer/ @smartcontracts
packages/data-transport-layer/ @tynes @smartcontracts
packages/replica-healthcheck @optimisticben @tynes
packages/sdk @smartcontracts @mslipper
packages/contracts @elenadimitrova @maurelian @smartcontracts
l2geth @tynes @cfromknecht @smartcontracts
ops @tynes @optimisticben @mslipper
......@@ -10,7 +10,7 @@
- 'patches/**/*'
M-ci:
- any: ['.github/**/*']
- any: ['.github/**/*', '.circleci/**/*']
M-l2geth:
- any: ['l2geth/**/*']
......@@ -35,3 +35,9 @@ M-sdk:
M-ops:
- any: ['ops/**/*']
C-Protocol-Critical:
- any:
- 'packages/data-transport-layer/**/*.ts'
- 'packages/contracts/**/*.sol'
- 'l2geth/**/*.go'
\ No newline at end of file
queue_rules:
- name: default
conditions: []
pull_request_rules:
- name: Automatic merge on approval
conditions:
- or:
- and:
- "label!=SR-Risk"
- "label!=C-Protocol-Critical"
- "#approved-reviews-by>=2"
- and:
- "label=SR-Risk"
- "#approved-reviews-by>=2"
- "approved-reviews-by=maurelian"
- and:
- "label=C-Protocol-Critical"
- "#approved-reviews-by>=2"
- or:
- "approved-reviews-by=tynes"
- "approved-reviews-by=smartcontracts"
actions:
queue:
name: default
method: squash
comment:
message: |
This PR has been added to the merge queue, and will be merged soon.
label:
add:
- on-merge-train
- name: Handle security critical PRs
conditions:
- "label=SR-Risk"
actions:
request_reviews:
users:
- "maurelian"
comment:
message: |
Hey there @{{author}}! You flagged this PR as security critical. To make review easier, please add a comment describing
1. The risks present in this PR.
2. The mitigations you have added to try and reduce those risks.
- name: Request reviewers
conditions:
- -closed
actions:
request_reviews:
users:
- cfromknecht
- tynes
- mslipper
- inphi
- tuxcanfly
- smartcontracts
random_count: 2
- name: Request protocol critical reviewers
conditions:
- label=C-Protocol-Critical
actions:
request_reviews:
users:
- tynes
- smartcontracts
random_count: 1
- name: Ask to resolve conflict
conditions:
- conflict
actions:
comment:
message: Hey @{{author}}! This PR has merge conflicts. Please fix them before continuing review.
- name: Notify author when added to merge queue
conditions:
- "check-pending=Queue: Embarked in merge train"
actions:
comment:
message: |
This PR is next in line to be merged, and will be merged as soon as checks pass.
\ No newline at end of file
name: batch-submitter unit tests
on:
push:
paths:
- 'go/batch-submitter/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
branches:
- '*'
workflow_dispatch:
defaults:
run:
working-directory: './go/batch-submitter'
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.16.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install
run: make
- name: Test
run: make test
name: bss-core unit tests
on:
push:
paths:
- 'go/bss-core/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
branches:
- '*'
workflow_dispatch:
defaults:
run:
working-directory: './go/bss-core'
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.16.x
- name: Checkout code
uses: actions/checkout@v2
- name: Test
run: go test -v ./...
name: External Tests (Synthetix)
on: workflow_dispatch
on:
schedule:
# run these tests once per day
- cron: '0 0 * * *'
jobs:
integration:
......@@ -69,7 +72,6 @@ jobs:
if: failure()
uses: jwalton/gh-docker-logs@v1
with:
images: 'ethereumoptimism/hardhat,ethereumoptimism/deployer,ethereumoptimism/data-transport-layer,ethereumoptimism/l2geth,ethereumoptimism/message-relayer,ethereumoptimism/batch-submitter,ethereumoptimism/l2geth,ethereumoptimism/integration-tests'
dest: '~/logs'
- name: Tar logs
......
name: gas-oracle unit tests
on:
push:
paths:
- 'go/gas-oracle/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
branches:
- '*'
workflow_dispatch:
defaults:
run:
working-directory: ./go/gas-oracle
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.16.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install
run: make
- name: Test
run: make test
name: geth unit tests
on:
push:
paths:
- 'l2geth/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
paths:
- 'l2geth/**'
workflow_dispatch:
defaults:
run:
working-directory: ./l2geth
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.15.x
- name: Checkout code
uses: actions/checkout@v2
- name: Lint
run: make lint
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.15.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install
run: make
- name: Test
run: make test
name: golangci-lint
on:
push:
paths:
- 'go/gas-oracle/**'
- 'go/batch-submitter/**'
- 'go/bss-core/**'
- 'go/teleportr/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
branches:
- '*'
jobs:
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: golangci-lint gas-oracle
uses: golangci/golangci-lint-action@v2
with:
version: v1.29
working-directory: go/gas-oracle
- name: golangci-lint batch-submitter
uses: golangci/golangci-lint-action@v2
with:
version: v1.29
working-directory: go/batch-submitter
- name: golangci-lint bss-core
uses: golangci/golangci-lint-action@v2
with:
version: v1.29
working-directory: go/bss-core
- name: golangci-lint teleportr
uses: golangci/golangci-lint-action@v2
with:
version: v1.29
working-directory: go/teleportr
name: indexer unit tests
on:
push:
paths:
- 'go/indexer/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
branches:
- '*'
workflow_dispatch:
defaults:
run:
working-directory: './go/indexer'
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.16.x
- name: Checkout code
uses: actions/checkout@v2
- name: Install
run: make
- name: Test
run: make test
name: integration
on:
push:
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
workflow_dispatch:
jobs:
integration:
runs-on: ubuntu-latest
services:
registry:
image: registry:2
ports:
- 5000:5000
strategy:
matrix:
batch-type:
- zlib
- legacy
env:
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
steps:
# Monorepo tests
- uses: actions/checkout@v2
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Set conditional env vars
run: |
echo "BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE=${{ matrix.batch-type }}" >> $GITHUB_ENV
- name: Bring the stack up
working-directory: ./ops
run: |
./scripts/stats.sh &
docker-compose -f docker-compose.yml up -d --scale replica-healthcheck=1
- name: Wait for the Sequencer node
working-directory: ./ops
run: ./scripts/wait-for-sequencer.sh
- name: Run the integration tests
working-directory: ./ops
run: docker-compose run integration_tests
- name: Collect docker logs on failure
if: failure()
uses: jwalton/gh-docker-logs@v1
with:
dest: '/home/runner/logs'
- name: Tar logs
if: failure()
run: tar cvzf ./logs.tgz ~/logs
- name: Upload logs to GitHub
if: failure()
uses: actions/upload-artifact@master
with:
name: logs.tgz
path: ./logs.tgz
name: proxyd unit tests
on:
push:
branches:
- 'master'
- 'develop'
pull_request:
branches:
- '*'
workflow_dispatch:
defaults:
run:
working-directory: ./go/proxyd
jobs:
tests:
runs-on: ubuntu-latest
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.15.x
- name: Checkout code
uses: actions/checkout@v2
- name: Build
run: make proxyd
- name: Lint
run: make lint
- name: Test
run: make test
name: Static analysis
on:
push:
branches:
- master
- develop
pull_request:
workflow_dispatch:
env:
PYTEST_ADDOPTS: "--color=yes"
jobs:
slither:
name: Slither run
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Fetch history
run: git fetch
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '16.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Dependencies
# only install dependencies if there was a change in the deps
# if: steps.yarn-cache.outputs.cache-hit != 'true'
run: yarn install
- name: Build
run: yarn build
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Install Slither
run: pip3 install slither-analyzer
- name: Run analysis
working-directory: ./packages/contracts
shell: bash
run: yarn test:slither
continue-on-error: false
name: teleportr unit tests
on:
push:
paths:
- 'go/teleportr/**'
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
branches:
- '*'
workflow_dispatch:
defaults:
run:
working-directory: './go/teleportr'
jobs:
tests:
runs-on: ubuntu-latest
services:
postgres:
image: postgres
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
ports:
- 5432:5432
steps:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.16.x
- name: Checkout code
uses: actions/checkout@v2
- name: Test
run: go test -v ./...
name: typescript / contracts
on:
push:
branches:
- 'master'
- 'develop'
- '*rc'
- 'release/*'
pull_request:
workflow_dispatch:
jobs:
test:
name: Run unit tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Fetch history
run: git fetch
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '16.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Dependencies
# only install dependencies if there was a change in the deps
# if: steps.yarn-cache.outputs.cache-hit != 'true'
run: yarn install
- name: Check yarn.lock for changes
run: git diff --exit-code
- name: Build
run: yarn build
- name: Test
run: yarn test
env:
FORCE_COLOR: 1
ENABLE_GAS_REPORT: 1
- name: Print gas report
run: cat packages/contracts/gas-report.txt
- name: Run codechecks
working-directory: ./packages/contracts
run: yarn codechecks
env:
CC_SECRET: ${{ secrets.CC_SECRET }}
test-coverage:
name: Generate test coverage
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Fetch history
run: git fetch
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '16.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Dependencies
# only install dependencies if there was a change in the deps
# if: steps.yarn-cache.outputs.cache-hit != 'true'
run: yarn install
- name: Build
run: yarn build
- name: Test Coverage
run: yarn test:coverage
- uses: codecov/codecov-action@v1
with:
files: ./packages/contracts/coverage.json
fail_ci_if_error: true
verbose: true
flags: contracts
- uses: codecov/codecov-action@v1
with:
files: ./packages/core-utils/coverage.json
fail_ci_if_error: false
verbose: true
flags: core-utils
- uses: codecov/codecov-action@v1
with:
files: ./packages/data-transport-layer/coverage.json
fail_ci_if_error: false
verbose: true
flags: data-transport-layer
- uses: codecov/codecov-action@v1
with:
files: ./packages/message-relayer/coverage.json
fail_ci_if_error: false
verbose: true
flags: message-relayer
- uses: codecov/codecov-action@v1
with:
files: ./packages/sdk/coverage.json
fail_ci_if_error: false
verbose: true
flags: sdk
depcheck:
name: Check for unused dependencies
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Fetch history
run: git fetch
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: '16.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Dependencies
# only install dependencies if there was a change in the deps
# if: steps.yarn-cache.outputs.cache-hit != 'true'
run: yarn install
- name: Check packages/contracts
working-directory: ./packages/contracts
run: npx depcheck
- name: Check packages/core-utils
working-directory: ./packages/core-utils
run: npx depcheck
- name: Check packages/data-transport-layer
working-directory: ./packages/data-transport-layer
run: npx depcheck
- name: Check packages/message-relayer
working-directory: ./packages/message-relayer
run: npx depcheck
- name: Check packages/sdk
working-directory: ./packages/sdk
run: npx depcheck
- name: Check integration-tests
working-directory: ./integration-tests
run: npx depcheck
lint:
name: Linting
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Fetch history
run: git fetch
- uses: actions/setup-node@v1
with:
node-version: '16.x'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Dependencies
# only install dependencies if there was a change in the deps
# if: steps.yarn-cache.outputs.cache-hit != 'true'
run: yarn install
- name: Lint
run: yarn lint:check
......@@ -187,11 +187,15 @@ func NewGasPriceOracle(cfg *Config) (*GasPriceOracle, error) {
}
// Ensure that we can actually connect to both backends
log.Info("Connecting to layer two")
if err := ensureConnection(l2Client); err != nil {
log.Error("Unable to connect to layer two", "addr", cfg.layerTwoHttpUrl)
log.Error("Unable to connect to layer two")
return nil, err
}
log.Info("Connecting to layer one")
if err := ensureConnection(l1Client); err != nil {
log.Error("Unable to connect to layer one", "addr", cfg.ethereumHttpUrl)
log.Error("Unable to connect to layer one")
return nil, err
}
address := cfg.gasPriceOracleAddress
......@@ -315,14 +319,18 @@ func NewGasPriceOracle(cfg *Config) (*GasPriceOracle, error) {
// Ensure that we can actually connect
func ensureConnection(client *ethclient.Client) error {
t := time.NewTicker(5 * time.Second)
t := time.NewTicker(1 * time.Second)
retries := 0
defer t.Stop()
for ; true; <-t.C {
_, err := client.ChainID(context.Background())
if err == nil {
break
} else {
return err
retries += 1
if retries > 90 {
return err
}
}
}
return nil
......
......@@ -34,7 +34,8 @@ services:
- ../scripts/:/scripts/
<<: *logging
replica-healthcheck:
image: ethereumoptimism/replica-healthcheck:${HC_IMAGE_TAG:-latest}
# TODO: Update this to latest when we fix the environment variables
image: ethereumoptimism/replica-healthcheck:${HC_IMAGE_TAG:-0.3.11}
restart: ${RESTART}
env_file:
- ${SHARED_ENV_PATH}/replica-healthcheck.env
......
......@@ -63,7 +63,7 @@
"eslint-plugin-unicorn": "^32.0.1",
"ethereum-waffle": "^3.3.0",
"ethers": "^5.5.4",
"hardhat": "^2.3.0",
"hardhat": "^2.9.2",
"hardhat-gas-reporter": "^1.0.4",
"lint-staged": "11.0.0",
"mocha": "^8.4.0",
......
......@@ -38,7 +38,7 @@ const procEnv = cleanEnv(process.env, {
L1_URL: str({ default: 'http://localhost:9545' }),
L1_POLLING_INTERVAL: num({ default: 10 }),
L2_CHAINID: num({ default: 987 }),
L2_CHAINID: num({ default: 17 }),
L2_GAS_PRICE: gasPriceValidator({
default: 'onchain',
}),
......
......@@ -56,6 +56,7 @@ var (
ArgsUsage: "<genesisPathOrUrl> (<genesisHash>)",
Flags: []cli.Flag{
utils.DataDirFlag,
utils.RollupGenesisTimeoutSecondsFlag,
},
Category: "BLOCKCHAIN COMMANDS",
Description: `
......@@ -65,7 +66,7 @@ participating.
It expects either a path or an HTTP URL to the genesis file as an argument. If an
HTTP URL is specified for the genesis file, then a hex-encoded SHA256 hash of the
genesis file must be included as a second argument. The hash provided on the CLI
will be checked against the hash of the genesis file downloaded from the URL.`,
}
dumpChainCfgCommand = cli.Command{
......@@ -236,7 +237,7 @@ func initGenesis(ctx *cli.Context) error {
log.Info("Fetching genesis file", "url", genesisPathOrURL)
genesisData, err := fetchGenesis(genesisPathOrURL)
genesisData, err := fetchGenesis(genesisPathOrURL, time.Duration(ctx.GlobalInt(utils.RollupGenesisTimeoutSecondsFlag.Name)))
if err != nil {
utils.Fatalf("Failed to fetch genesis file: %v", err)
}
......@@ -640,9 +641,9 @@ func hashish(x string) bool {
return err != nil
}
func fetchGenesis(url string) ([]byte, error) {
func fetchGenesis(url string, timeout time.Duration) ([]byte, error) {
client := &http.Client{
Timeout: 60 * time.Second,
Timeout: timeout,
}
resp, err := client.Get(url)
if err != nil {
......
......@@ -71,6 +71,7 @@ at block: 0 ({{niltime}})
// Tests that a console can be attached to a running node via various means.
func TestIPCAttachWelcome(t *testing.T) {
t.Skip()
// Configure the instance for IPC attachment
coinbase := "0x8605cdbbdb6d264aa742e77020dcbc58fcdce182"
var ipc string
......@@ -98,6 +99,7 @@ func TestIPCAttachWelcome(t *testing.T) {
}
func TestHTTPAttachWelcome(t *testing.T) {
t.Skip()
coinbase := "0x8605cdbbdb6d264aa742e77020dcbc58fcdce182"
port := strconv.Itoa(trulyRandInt(1024, 65536)) // Yeah, sometimes this will fail, sorry :P
geth := runGeth(t,
......@@ -114,6 +116,7 @@ func TestHTTPAttachWelcome(t *testing.T) {
}
func TestWSAttachWelcome(t *testing.T) {
t.Skip()
coinbase := "0x8605cdbbdb6d264aa742e77020dcbc58fcdce182"
port := strconv.Itoa(trulyRandInt(1024, 65536)) // Yeah, sometimes this will fail, sorry :P
......
......@@ -163,6 +163,7 @@ var (
utils.RollupEnforceFeesFlag,
utils.RollupFeeThresholdDownFlag,
utils.RollupFeeThresholdUpFlag,
utils.RollupGenesisTimeoutSecondsFlag,
utils.SequencerClientHttpFlag,
utils.TxPublisherEnableFlag,
utils.TxPublisherProjectIDFlag,
......
......@@ -77,6 +77,7 @@ var AppHelpFlagGroups = []flagGroup{
utils.RollupEnforceFeesFlag,
utils.RollupFeeThresholdDownFlag,
utils.RollupFeeThresholdUpFlag,
utils.RollupGenesisTimeoutSecondsFlag,
utils.SequencerClientHttpFlag,
utils.TxPublisherEnableFlag,
utils.TxPublisherProjectIDFlag,
......
......@@ -862,6 +862,12 @@ var (
Usage: "Allow txs with fees above the current fee up to this amount, must be > 1",
EnvVar: "ROLLUP_FEE_THRESHOLD_UP",
}
RollupGenesisTimeoutSecondsFlag = cli.DurationFlag{
Name: "rollup.genesistimeoutseconds",
Usage: "Timeout for the genesis file to be fetched",
Value: time.Second * 60,
EnvVar: "ROLLUP_GENESIS_TIMEOUT_SECONDS",
}
SequencerClientHttpFlag = cli.StringFlag{
Name: "sequencer.clienthttp",
Usage: "HTTP endpoint for the sequencer client",
......
......@@ -3,12 +3,11 @@ version: '3.4'
x-system-addr-env: &system-addr-env
# private key: a6aecc98b63bafb0de3b29ae9964b14acb4086057808be29f90150214ebd4a0f
# OK to publish this since it will only ever be used in itests
SYSTEM_ADDRESS_0_DEPLOYER: "0xa961b0d6dce82db098cf70a42a14add3ee3db2d5"
SYSTEM_ADDRESS_0_DEPLOYER: '0xa961b0d6dce82db098cf70a42a14add3ee3db2d5'
# private key: 3b8d2345102cce2443acb240db6e87c8edd4bb3f821b17fab8ea2c9da08ea132
# OK to publish this since it will only ever be used in itests
SYSTEM_ADDRESS_1_DEPLOYER: "0xdfc82d475833a50de90c642770f34a9db7deb725"
SYSTEM_ADDRESS_1_DEPLOYER: '0xdfc82d475833a50de90c642770f34a9db7deb725'
services:
# this is a helper service used because there's no official hardhat image
......@@ -64,7 +63,7 @@ services:
DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT: http://l1_chain:8545
DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT: http://l2geth:8545
DATA_TRANSPORT_LAYER__SYNC_FROM_L2: 'true'
DATA_TRANSPORT_LAYER__L2_CHAIN_ID: 987
DATA_TRANSPORT_LAYER__L2_CHAIN_ID: 17
ports:
- ${DTL_PORT:-7878}:7878
......@@ -174,7 +173,7 @@ services:
- ${REPLICA_HTTP_PORT:-8549}:8545
- ${REPLICA_WS_PORT:-8550}:8546
replica-healthcheck:
replica_healthcheck:
depends_on:
- l2geth
- replica
......@@ -203,7 +202,7 @@ services:
environment:
L1_URL: http://l1_chain:8545
L2_URL: http://l2geth:8545
HEALTHCHECK_URL: http://replica-healthcheck:7300/metrics
HEALTHCHECK_URL: http://replica_healthcheck:7300/metrics
REPLICA_URL: http://replica:8545
VERIFIER_URL: http://verifier:8545
URL: http://deployer:8081/addresses.json
......@@ -211,11 +210,11 @@ services:
NO_NETWORK: 1
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
RUN_SYSTEM_ADDRESS_TESTS: "true"
RUN_SYSTEM_ADDRESS_TESTS: 'true'
# must match l2geth environment, see above for why it's safe to publish these
SYSTEM_ADDRESS_0_DEPLOYER_KEY: "a6aecc98b63bafb0de3b29ae9964b14acb4086057808be29f90150214ebd4a0f"
SYSTEM_ADDRESS_1_DEPLOYER_KEY: "3b8d2345102cce2443acb240db6e87c8edd4bb3f821b17fab8ea2c9da08ea132"
SYSTEM_ADDRESS_0_DEPLOYER_KEY: 'a6aecc98b63bafb0de3b29ae9964b14acb4086057808be29f90150214ebd4a0f'
SYSTEM_ADDRESS_1_DEPLOYER_KEY: '3b8d2345102cce2443acb240db6e87c8edd4bb3f821b17fab8ea2c9da08ea132'
gas_oracle:
deploy:
......@@ -224,9 +223,9 @@ services:
context: ..
dockerfile: ./ops/docker/Dockerfile.gas-oracle
image: ethereumoptimism/gas-oracle:${DOCKER_TAG_GAS_ORACLE:-latest}
entrypoint: ./gas-oracle.sh
environment:
GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL: http://l2geth:8545
GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL: http://l1_chain:8545
GAS_PRICE_ORACLE_LAYER_TWO_HTTP_URL: http://l2geth:8545
# Default hardhat account 5
GAS_PRICE_ORACLE_PRIVATE_KEY: '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba'
......
......@@ -11,5 +11,4 @@ RUN apk add --no-cache ca-certificates jq curl
COPY --from=builder /gas-oracle/gas-oracle /usr/local/bin/
WORKDIR /usr/local/bin/
COPY ./ops/scripts/gas-oracle.sh .
ENTRYPOINT ["gas-oracle"]
......@@ -2,16 +2,9 @@
# be used to build any of the follow-on services
#
# ### BASE: Install deps
# We do not use Alpine because there's a regression causing it to be very slow
# when used with typescript/hardhat: https://github.com/nomiclabs/hardhat/issues/1219
FROM node:16.13-buster-slim as base
FROM node:16-alpine3.14 as base
RUN apt-get update -y && apt-get install -y --no-install-recommends git \
curl \
jq \
python3 \
ca-certificates \
&& rm -rf /var/lib/apt/lists/*
RUN apk --no-cache add curl jq python3 ca-certificates git make gcc musl-dev linux-headers bash
# copy over the needed configs to run the dep installation
# note: this approach can be a bit unhandy to maintain, but it allows
......
const isForkModeEnabled = !!process.env.FORK_URL
const forkUrl = process.env.FORK_URL
const forkStartingBlock = parseInt(process.env.FORK_STARTING_BLOCK) || undefined
const gasPrice = parseInt(process.env.GAS_PRICE) || 0
const forkStartingBlock =
parseInt(process.env.FORK_STARTING_BLOCK, 10) || undefined
const gasPrice = parseInt(process.env.GAS_PRICE, 10) || 0
const config = {
networks: {
hardhat: {
gasPrice,
initialBaseFeePerGas: 0
initialBaseFeePerGas: 0,
},
},
analytics: { enabled: false },
......
......@@ -6,6 +6,6 @@
},
"license": "MIT",
"dependencies": {
"hardhat": "^2.7.0"
"hardhat": "^2.9.2"
}
}
......@@ -20,12 +20,12 @@ WS_PORT=8546
WS_API=eth,net,rollup,web3
WS_ORIGINS=*
CHAIN_ID=987
CHAIN_ID=17
DATADIR=/root/.ethereum
GASPRICE=0
GCMODE=archive
IPC_DISABLE=true
NETWORK_ID=987
NETWORK_ID=17
NO_USB=true
NO_DISCOVER=true
TARGET_GAS_LIMIT=15000000
......
#!/bin/sh
RETRIES=${RETRIES:-40}
if [ -z "$GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL" ]; then
echo "Must set env GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL"
exit 1
fi
# waits for l2geth to be up
curl --fail \
--show-error \
--silent \
--retry-connrefused \
--retry $RETRIES \
--retry-delay 1 \
--output /dev/null \
$GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL
exec gas-oracle "$@"
......@@ -8,7 +8,7 @@ do
sleep 1
if [ $i -eq $RETRIES ]; then
echo 'Timed out waiting for sequencer'
break
exit 1
fi
echo 'Waiting for sequencer...'
((i=i+1))
......
......@@ -218,6 +218,16 @@ export abstract class BaseServiceV2<
return acc
}, {}) as TOptions
// Make sure all options are defined.
for (const [optionName, optionSpec] of Object.entries(params.optionsSpec)) {
if (
optionSpec.default === undefined &&
this.options[optionName] === undefined
) {
throw new Error(`missing required option: ${optionName}`)
}
}
// Create the metrics objects.
this.metrics = Object.keys(params.metricsSpec || {}).reduce((acc, key) => {
const spec = params.metricsSpec[key]
......@@ -237,11 +247,26 @@ export abstract class BaseServiceV2<
this.logger = new Logger({ name: params.name })
// Gracefully handle stop signals.
const maxSignalCount = 3
let currSignalCount = 0
const stop = async (signal: string) => {
this.logger.info(`stopping service with signal`, { signal })
await this.stop()
process.exit(0)
// Allow exiting fast if more signals are received.
currSignalCount++
if (currSignalCount === 1) {
this.logger.info(`stopping service with signal`, { signal })
await this.stop()
process.exit(0)
} else if (currSignalCount >= maxSignalCount) {
this.logger.info(`performing hard stop`)
process.exit(0)
} else {
this.logger.info(
`send ${maxSignalCount - currSignalCount} more signal(s) to hard stop`
)
}
}
// Handle stop signals.
process.on('SIGTERM', stop)
process.on('SIGINT', stop)
}
......
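Taken together, the two BaseServiceV2 changes in this file mean a service now fails fast when a required option is left undefined and can be force-stopped by sending repeated signals. A minimal, hypothetical sketch of the options spec a subclass would pass to super(); the entry field names (validator, desc, default) and the validators.num re-export are assumptions based on this diff and the validators module below, not taken verbatim from the PR:

import { validators } from '@eth-optimism/common-ts'

// Illustrative spec for a hypothetical service option.
const optionsSpec = {
  pollIntervalMs: {
    validator: validators.num,
    desc: 'How often to poll, in milliseconds',
    // No `default` here: with the new check, leaving this option undefined
    // makes the constructor throw "missing required option: pollIntervalMs"
    // instead of surfacing as a confusing failure later on.
  },
}

// Shutdown behaviour after this change: the first SIGINT/SIGTERM runs the
// graceful stop() path, and a third signal takes the hard-stop branch and
// calls process.exit(0) immediately.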
......@@ -18,6 +18,19 @@ const provider = makeValidator<Provider>((input) => {
return new ethers.providers.JsonRpcProvider(parsed)
})
const jsonRpcProvider = makeValidator<ethers.providers.JsonRpcProvider>(
(input) => {
const parsed = url()._parse(input)
return new ethers.providers.JsonRpcProvider(parsed)
}
)
const staticJsonRpcProvider =
makeValidator<ethers.providers.StaticJsonRpcProvider>((input) => {
const parsed = url()._parse(input)
return new ethers.providers.StaticJsonRpcProvider(parsed)
})
const wallet = makeValidator<Signer>((input) => {
if (!ethers.utils.isHexString(input)) {
throw new Error(`expected wallet to be a hex string`)
......@@ -37,4 +50,6 @@ export const validators = {
json,
wallet,
provider,
jsonRpcProvider,
staticJsonRpcProvider,
}
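The two new validators are built with envalid's makeValidator, so besides plugging into a BaseServiceV2 options spec they can be used directly with cleanEnv, the same way the integration tests use str and num elsewhere in this PR. A small usage sketch; the environment variable names are made up for illustration:

import { cleanEnv } from 'envalid'
import { validators } from '@eth-optimism/common-ts'

// Each validator parses a URL-shaped input and returns a constructed provider.
const env = cleanEnv(process.env, {
  L1_RPC_URL: validators.staticJsonRpcProvider({ desc: 'L1 RPC endpoint' }),
  L2_RPC_URL: validators.jsonRpcProvider({ desc: 'L2 RPC endpoint' }),
})

// env.L1_RPC_URL is an ethers StaticJsonRpcProvider, which caches the detected
// network rather than re-querying the chain ID on every request; env.L2_RPC_URL
// is a plain JsonRpcProvider.
env.L2_RPC_URL.getBlockNumber().then((n) => console.log('L2 block', n))

StaticJsonRpcProvider is the better fit when an endpoint's chain never changes at runtime, which is why it is worth exposing as a separate validator.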
......@@ -4,7 +4,7 @@ const config: DeployConfig = {
network: 'local',
l1BlockTimeSeconds: 15,
l2BlockGasLimit: 15_000_000,
l2ChainId: 987,
l2ChainId: 17,
ctcL2GasDiscountDivisor: 32,
ctcEnqueueGasCost: 60_000,
sccFaultProofWindowSeconds: 0,
......
......@@ -95,6 +95,16 @@ StateCommitmentChain
</a>
</td>
</tr>
<tr>
<td>
TeleportrDeposit
</td>
<td align="center">
<a href="https://kovan.etherscan.io/address/0x4821975ca220601c153d02353300d6ad34adc362">
<code>0x4821975ca220601c153d02353300d6ad34adc362</code>
</a>
</td>
</tr>
</table>
## Layer 2 Contracts
......
......@@ -100,7 +100,7 @@
"ethereum-waffle": "^3.3.0",
"ethers": "^5.5.4",
"glob": "^7.1.6",
"hardhat": "^2.3.0",
"hardhat": "^2.9.2",
"hardhat-deploy": "^0.9.3",
"hardhat-gas-reporter": "^1.0.4",
"hardhat-output-validator": "^0.1.18",
......
......@@ -26,9 +26,9 @@ describe('OVM_GasPriceOracle', () => {
await signer1.getAddress()
)
OVM_GasPriceOracle.setOverhead(2750)
OVM_GasPriceOracle.setScalar(1500000)
OVM_GasPriceOracle.setDecimals(6)
await OVM_GasPriceOracle.setOverhead(2750)
await OVM_GasPriceOracle.setScalar(1500000)
await OVM_GasPriceOracle.setDecimals(6)
})
describe('owner', () => {
......
......@@ -75,7 +75,7 @@
"eslint-plugin-prettier": "^3.4.0",
"eslint-plugin-react": "^7.24.0",
"eslint-plugin-unicorn": "^32.0.1",
"hardhat": "^2.3.0",
"hardhat": "^2.9.2",
"lint-staged": "11.0.0",
"mocha": "^8.4.0",
"pino-pretty": "^4.7.1",
......
......@@ -27,6 +27,7 @@ const TRANSPORT_DB_KEYS = {
STARTING_L1_BLOCK: `l1:starting`,
HIGHEST_L2_BLOCK: `l2:highest`,
HIGHEST_SYNCED_BLOCK: `synced:highest`,
CONSISTENCY_CHECK: `consistency:checked`,
}
interface Indexed {
......@@ -202,6 +203,20 @@ export class TransportDB {
return this.db.get<number>(TRANSPORT_DB_KEYS.HIGHEST_L2_BLOCK, 0)
}
public async getConsistencyCheckFlag(): Promise<boolean> {
return this.db.get<boolean>(TRANSPORT_DB_KEYS.CONSISTENCY_CHECK, 0)
}
public async putConsistencyCheckFlag(flag: boolean): Promise<void> {
return this.db.put<boolean>([
{
key: TRANSPORT_DB_KEYS.CONSISTENCY_CHECK,
index: 0,
value: flag,
},
])
}
public async putHighestL2BlockNumber(
block: number | BigNumber
): Promise<void> {
......
export type EventName = 'SequencerTransaction'
export class MissingElementError extends Error {
constructor(public name: EventName) {
super(`missing event: ${name}`)
}
}
......@@ -11,6 +11,7 @@ import {
TransactionEntry,
} from '../../../types'
import { parseSignatureVParam } from '../../../utils'
import { MissingElementError } from './errors'
export const handleSequencerBlock = {
parseBlock: async (
......@@ -22,9 +23,12 @@ export const handleSequencerBlock = {
}> => {
const transaction = block.transactions[0]
const transactionIndex =
transaction.index === null || transaction.index === undefined
? BigNumber.from(transaction.blockNumber).toNumber() - 1
: BigNumber.from(transaction.index).toNumber()
BigNumber.from(transaction.blockNumber).toNumber() - 1
// We make the assumption that you don't need to sync the genesis block
if (transactionIndex < 0) {
throw new Error('should not happen, attempted to sync genesis block')
}
let transactionEntry: Partial<TransactionEntry> = {
// Legacy support.
......@@ -111,6 +115,17 @@ export const handleSequencerBlock = {
},
db: TransportDB
): Promise<void> => {
if (entry.transactionEntry.index > 0) {
const prevTransactionEntry = await db.getUnconfirmedTransactionByIndex(
entry.transactionEntry.index - 1
)
// We should *always* have a previous transaction here.
if (prevTransactionEntry === null) {
throw new MissingElementError('SequencerTransaction')
}
}
// Having separate indices for confirmed/unconfirmed means we never have to worry about
// accidentally overwriting a confirmed transaction with an unconfirmed one. Unconfirmed
// transactions are purely extra information.
......
......@@ -6,7 +6,7 @@ import { BigNumber } from 'ethers'
import { LevelUp } from 'levelup'
import axios from 'axios'
import bfj from 'bfj'
import { Gauge } from 'prom-client'
import { Gauge, Histogram } from 'prom-client'
/* Imports: Internal */
import { handleSequencerBlock } from './handlers/transaction'
......@@ -16,6 +16,7 @@ import { L1DataTransportServiceOptions } from '../main/service'
interface L2IngestionMetrics {
highestSyncedL2Block: Gauge<string>
fetchBlocksRequestTime: Histogram<string>
}
const registerMetrics = ({
......@@ -27,6 +28,12 @@ const registerMetrics = ({
help: 'Highest Synced L2 Block Number',
registers: [registry],
}),
fetchBlocksRequestTime: new client.Histogram({
name: 'data_transport_layer_fetch_blocks_time',
help: 'Amount of time fetching remote L2 blocks takes',
buckets: [0.1, 5, 15, 50, 100, 500],
registers: [registry],
}),
})
export interface L2IngestionServiceOptions
......@@ -100,7 +107,58 @@ export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
: this.options.l2RpcProvider
}
protected async ensure(): Promise<void> {
let retries = 0
while (true) {
try {
await this.state.l2RpcProvider.getNetwork()
break
} catch (e) {
retries++
this.logger.info(`Cannot connect to L2, retrying ${retries}/20`)
if (retries >= 20) {
this.logger.info('Cannot connect to L2, shutting down')
await this.stop()
process.exit()
}
await sleep(1000 * retries)
}
}
}
protected async checkConsistency(): Promise<void> {
const network = await this.state.l2RpcProvider.getNetwork()
const shouldDoCheck = !(await this.state.db.getConsistencyCheckFlag())
if (shouldDoCheck && network.chainId === 69) {
this.logger.info('performing consistency check')
const highestBlock =
await this.state.db.getHighestSyncedUnconfirmedBlock()
for (let i = 0; i < highestBlock; i++) {
const block = await this.state.db.getUnconfirmedTransactionByIndex(i)
if (block === null) {
this.logger.info('resetting to null block', {
index: i,
})
await this.state.db.setHighestSyncedUnconfirmedBlock(i)
break
}
// Log some progress so people know what's going on.
if (i % 10000 === 0) {
this.logger.info(`consistency check progress`, {
index: i,
})
}
}
this.logger.info('consistency check complete')
await this.state.db.putConsistencyCheckFlag(true)
}
}
protected async _start(): Promise<void> {
await this.ensure()
await this.checkConsistency()
while (this.running) {
try {
const highestSyncedL2BlockNumber =
......@@ -240,6 +298,8 @@ export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
)
}
const end = this.l2IngestionMetrics.fetchBlocksRequestTime.startTimer()
const resp = await axios.post(
this.state.l2RpcProvider.connection.url,
req,
......@@ -249,6 +309,8 @@ export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
yieldRate: 4096, // this yields a bit more often than the default of 16384
})
end()
result = respJson.result
if (result === null) {
retry++
......
......@@ -89,6 +89,10 @@ export class L1DataTransportService extends BaseService<L1DataTransportServiceOp
}
})
this.state.metrics.client.collectDefaultMetrics({
prefix: 'data_transport_layer_'
})
this.state.failureCounter = new this.state.metrics.client.Counter({
name: 'data_transport_layer_main_service_failures',
help: 'Counts the number of times that the main service fails',
......
......@@ -52,7 +52,7 @@
"eslint-plugin-react": "^7.24.0",
"eslint-plugin-unicorn": "^32.0.1",
"ethereum-waffle": "^3.3.0",
"hardhat": "^2.3.0",
"hardhat": "^2.9.2",
"lint-staged": "11.0.0",
"prettier": "^2.3.1",
"ts-node": "^10.0.0",
......
......@@ -55,7 +55,7 @@
"eslint-plugin-unicorn": "^32.0.1",
"ethereum-waffle": "^3.4.0",
"ethers": "^5.5.4",
"hardhat": "^2.3.0",
"hardhat": "^2.9.2",
"lint-staged": "11.0.0",
"mocha": "^8.4.0",
"nyc": "^15.1.0",
......