exchain / nebula · Commits · commit 2d34f162 (unverified)

Authored Oct 06, 2022 by Maurelian; committed by GitHub on Oct 06, 2022
Merge branch 'develop' into ctb/prettier-tests
Parents: 01ed5651, 15b73f11
Showing 19 changed files with 508 additions and 144 deletions (+508 / -144)
Files changed:

  .changeset/chilly-cups-agree.md                    +5    -0
  .changeset/serious-pianos-complain.md              +5    -0
  .circleci/config.yml                               +17   -4
  indexer/db/db.go                                   +103  -75
  indexer/db/eth.go                                  +4    -2
  indexer/db/l1block.go                              +5    -7
  indexer/db/sql.go                                  +29   -7
  indexer/db/state_batch.go                          +30   -0
  indexer/db/withdrawal.go                           +51   -13
  indexer/services/l1/service.go                     +5    -6
  indexer/services/l2/service.go                     +1    -11
  op-e2e/actions/l2_sequencer.go                     +94   -0
  op-e2e/actions/l2_sequencer_test.go                +102  -0
  op-e2e/actions/l2_verifier.go                      +29   -8
  op-node/testutils/metrics.go                       +7    -0
  ops/docker/ci-builder/Dockerfile                   +1    -8
  packages/contracts/deployments/goerli/README.md    +2    -3
  packages/sdk/src/interfaces/types.ts               +1    -0
  packages/sdk/src/utils/chain-constants.ts          +17   -0
.changeset/chilly-cups-agree.md (new file, 0 → 100644, +5 -0)

+---
+'@eth-optimism/sdk': patch
+---
+
+Adds contract addresses for the Bedrock Alpha testnet
.changeset/slow-stingrays-rescue.md → .changeset/serious-pianos-complain.md (renamed)

@@ -2,4 +2,4 @@
 '@eth-optimism/ci-builder': patch
 ---

-Install slither from a specific commit hash
+Pin slither version to 0.9.0
.circleci/config.yml (+17 -4)

@@ -145,7 +145,7 @@ jobs:
           working_directory: packages/contracts-bedrock
       - run:
           name: upload coverage
-          command: codecov --verbose --clean --flag contracts-bedrock-forge
+          command: codecov --verbose --clean --flag contracts-bedrock-tests
           environment:
             FOUNDRY_PROFILE: ci
       - run:

@@ -190,6 +190,9 @@ jobs:
         description: Regex matching dependent packages
         type: string
         default: this-package-does-not-exist
+      coverage_flag:
+        description: Coverage flag name
+        type: string
     docker:
       - image: ethereumoptimism/ci-builder:latest
     resource_class: large

@@ -214,9 +217,7 @@ jobs:
           working_directory: packages/<<parameters.package_name>>
       - run:
           name: Upload coverage
-          command: |
-            echo <<parameters.package_name>> # TEMP for debugging
-            codecov --verbose --clean --flag <<parameters.package_name>>
+          command: codecov --verbose --clean --flag <<parameters.coverage_flag>>
   bedrock-go-tests:
     docker:

@@ -637,17 +638,20 @@ workflows:
             - yarn-monorepo
       - js-lint-test:
          name: common-ts-tests
+          coverage_flag: common-ts-tests
          package_name: common-ts
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: contracts-tests
+          coverage_flag: contracts-tests
          package_name: contracts
          dependencies: hardhat-deploy-config
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: core-utils-tests
+          coverage_flag: core-utils-tests
          package_name: core-utils
          requires:
            - yarn-monorepo

@@ -659,54 +663,63 @@ workflows:
            - yarn-monorepo
       - js-lint-test:
          name: actor-tests-tests
+          coverage_flag: actor-tests-tests
          package_name: actor-tests
          dependencies: "(core-utils|sdk)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: contracts-governance-tests
+          coverage_flag: contracts-governance-tests
          package_name: contracts-governance
          dependencies: "(core-utils|sdk)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: contracts-periphery-tests
+          coverage_flag: contracts-periphery-tests
          package_name: contracts-periphery
          dependencies: "(contracts|contracts-bedrock|core-utils|hardhat-deploy-config)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: dtl-tests
+          coverage_flag: dtl-tests
          package_name: data-transport-layer
          dependencies: "(common-ts|contracts|core-utils)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: drippie-mon-tests
+          coverage_flag: drippie-mon-tests
          package_name: drippie-mon
          dependencies: "(common-ts|contracts-periphery|core-utils|sdk)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: fault-detector-tests
+          coverage_flag: fault-detector-tests
          package_name: fault-detector
          dependencies: "(common-ts|contracts|core-utils|sdk)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: message-relayer-tests
+          coverage_flag: message-relayer-tests
          package_name: message-relayer
          dependencies: "(common-ts|core-utils|sdk)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: replica-healthcheck-tests
+          coverage_flag: replica-healthcheck-tests
          package_name: replica-healthcheck
          dependencies: "(common-ts|core-utils)"
          requires:
            - yarn-monorepo
       - js-lint-test:
          name: sdk-tests
+          coverage_flag: sdk-tests
          package_name: sdk
          dependencies: "(contracts|core-utils)"
          requires:
indexer/db/db.go (+103 -75)

    (diff collapsed in the original view; not shown)
indexer/db/eth.go (+4 -2)

@@ -2,10 +2,12 @@ package db
 import "github.com/ethereum/go-ethereum/common"

+var ETHL1Address common.Address
+
 // ETHL1Token is a placeholder token for differentiating ETH transactions from
 // ERC20 transactions on L1.
 var ETHL1Token = &Token{
-    Address:  "0x0000000000000000000000000000000000000000",
+    Address:  ETHL1Address.String(),
     Name:     "Ethereum",
     Symbol:   "ETH",
     Decimals: 18,

@@ -18,7 +20,7 @@ var ETHL2Address = common.HexToAddress("0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000")
 // ETHL2Token is a placeholder token for differentiating ETH transactions from
 // ERC20 transactions on L2.
 var ETHL2Token = &Token{
-    Address:  "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000",
+    Address:  ETHL2Address.String(),
     Name:     "Ethereum",
     Symbol:   "ETH",
     Decimals: 18,
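Both placeholder tokens now take their Address string from a common.Address value instead of a hard-coded literal; ETHL1Address is the zero value, so the rendered string is unchanged. A small standalone sketch, not part of the diff, illustrating that assumption:

package main

import (
    "fmt"

    "github.com/ethereum/go-ethereum/common"
)

func main() {
    // The zero value of common.Address stringifies to the all-zero address,
    // so ETHL1Token.Address keeps its previous literal value.
    var ethL1Address common.Address
    fmt.Println(ethL1Address.String()) // 0x0000000000000000000000000000000000000000

    // The L2 placeholder keeps using the well-known "dead" address.
    ethL2Address := common.HexToAddress("0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000")
    fmt.Println(ethL2Address.String()) // 0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000
}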
indexer/db/l1block.go (+5 -7)

@@ -6,12 +6,11 @@ import (
 // IndexedL1Block contains the L1 block including the deposits in it.
 type IndexedL1Block struct {
     Hash        common.Hash
     ParentHash  common.Hash
     Number      uint64
     Timestamp   uint64
     Deposits    []Deposit
-    Withdrawals []Withdrawal
 }

 // String returns the block hash for the indexed l1 block.

@@ -25,7 +24,6 @@ type IndexedL2Block struct {
     ParentHash  common.Hash
     Number      uint64
     Timestamp   uint64
-    Deposits    []Deposit
     Withdrawals []Withdrawal
 }
indexer/db/sql.go (+29 -7)

@@ -28,10 +28,8 @@ CREATE TABLE IF NOT EXISTS deposits (
     amount VARCHAR NOT NULL,
     data BYTEA NOT NULL,
     log_index INTEGER NOT NULL,
-    l1_block_hash VARCHAR NOT NULL REFERENCES l1_blocks(hash),
-    l2_block_hash VARCHAR REFERENCES l2_blocks(hash),
-    tx_hash VARCHAR NOT NULL,
-    failed BOOLEAN NOT NULL DEFAULT false
+    block_hash VARCHAR NOT NULL REFERENCES l1_blocks(hash),
+    tx_hash VARCHAR NOT NULL
 )
 `

@@ -53,6 +51,20 @@ CREATE TABLE IF NOT EXISTS l2_tokens (
 )
 `

+const createStateBatchesTable = `
+CREATE TABLE IF NOT EXISTS state_batches (
+    index INTEGER NOT NULL PRIMARY KEY,
+    root VARCHAR NOT NULL,
+    size INTEGER NOT NULL,
+    prev_total INTEGER NOT NULL,
+    extra_data BYTEA NOT NULL,
+    block_hash VARCHAR NOT NULL REFERENCES l1_blocks(hash)
+);
+CREATE INDEX IF NOT EXISTS state_batches_block_hash ON state_batches(block_hash);
+CREATE INDEX IF NOT EXISTS state_batches_size ON state_batches(size);
+CREATE INDEX IF NOT EXISTS state_batches_prev_total ON state_batches(prev_total);
+`
+
 const createWithdrawalsTable = `
 CREATE TABLE IF NOT EXISTS withdrawals (
     guid VARCHAR PRIMARY KEY NOT NULL,

@@ -63,9 +75,9 @@ CREATE TABLE IF NOT EXISTS withdrawals (
     amount VARCHAR NOT NULL,
     data BYTEA NOT NULL,
     log_index INTEGER NOT NULL,
-    l1_block_hash VARCHAR REFERENCES l1_blocks(hash),
-    l2_block_hash VARCHAR NOT NULL REFERENCES l2_blocks(hash),
-    tx_hash VARCHAR NOT NULL
+    block_hash VARCHAR NOT NULL REFERENCES l2_blocks(hash),
+    tx_hash VARCHAR NOT NULL,
+    state_batch INTEGER REFERENCES state_batches(index)
 )
 `

@@ -110,15 +122,25 @@ CREATE TABLE IF NOT EXISTS airdrops (
 )
 `

+const updateWithdrawalsTable = `
+ALTER TABLE withdrawals ADD COLUMN IF NOT EXISTS br_withdrawal_hash VARCHAR NULL;
+ALTER TABLE withdrawals ADD COLUMN IF NOT EXISTS br_withdrawal_finalized_tx_hash VARCHAR NULL;
+ALTER TABLE withdrawals ADD COLUMN IF NOT EXISTS br_withdrawal_finalized_log_index BOOLEAN NULL;
+ALTER TABLE withdrawals ADD COLUMN IF NOT EXISTS br_withdrawal_success BOOLEAN NULL;
+CREATE INDEX IF NOT EXISTS withdrawals_br_withdrawal_hash ON withdrawals(br_withdrawal_hash);
+`
+
 var schema = []string{
     createL1BlocksTable,
     createL2BlocksTable,
     createL1TokensTable,
     createL2TokensTable,
+    createStateBatchesTable,
     insertETHL1Token,
     insertETHL2Token,
     createDepositsTable,
     createWithdrawalsTable,
     createL1L2NumberIndex,
     createAirdropsTable,
+    updateWithdrawalsTable,
 }
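The schema slice is ordered so that updateWithdrawalsTable, which only contains ALTER TABLE and CREATE INDEX statements, runs after the tables it touches have been created. A minimal sketch of how such a slice is typically applied in order, assuming database/sql with a Postgres driver and a reachable database; the connection string and driver choice are illustrative, not taken from the indexer:

package main

import (
    "database/sql"
    "log"

    _ "github.com/lib/pq" // hypothetical choice of Postgres driver
)

// applySchema executes each migration statement in order, so the ALTER TABLE
// statements in updateWithdrawalsTable run only after the tables exist.
func applySchema(db *sql.DB, schema []string) error {
    for _, stmt := range schema {
        if _, err := db.Exec(stmt); err != nil {
            return err
        }
    }
    return nil
}

func main() {
    // Illustrative connection string; the real indexer configures its own DB handle.
    db, err := sql.Open("postgres", "postgres://localhost/indexer?sslmode=disable")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // schema would be the []string shown in indexer/db/sql.go.
    if err := applySchema(db, []string{ /* createL1BlocksTable, ..., updateWithdrawalsTable */ }); err != nil {
        log.Fatal(err)
    }
}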
indexer/db/state_batch.go (new file, 0 → 100644, +30 -0)

package db

import (
    "math/big"

    "github.com/ethereum/go-ethereum/common"
)

// StateBatch is the state batch containing merkle root of the withdrawals
// periodically written to L1.
type StateBatch struct {
    Index     *big.Int
    Root      common.Hash
    Size      *big.Int
    PrevTotal *big.Int
    ExtraData []byte
    BlockHash common.Hash
}

// StateBatchJSON contains StateBatch data suitable for JSON serialization.
type StateBatchJSON struct {
    Index          uint64 `json:"index"`
    Root           string `json:"root"`
    Size           uint64 `json:"size"`
    PrevTotal      uint64 `json:"prevTotal"`
    ExtraData      []byte `json:"extraData"`
    BlockHash      string `json:"blockHash"`
    BlockNumber    uint64 `json:"blockNumber"`
    BlockTimestamp uint64 `json:"blockTimestamp"`
}
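StateBatchJSON carries the batch fields plus L1 block metadata (number, timestamp) that is not stored on StateBatch itself. The helper below is hypothetical, not part of the diff, and assumes it lives in the same db package; it only shows how the two shapes line up:

// stateBatchToJSON is a hypothetical helper flattening a StateBatch plus its
// L1 block metadata into the JSON-friendly form defined above.
func stateBatchToJSON(b *StateBatch, blockNumber, blockTimestamp uint64) StateBatchJSON {
    return StateBatchJSON{
        Index:          b.Index.Uint64(),
        Root:           b.Root.Hex(),
        Size:           b.Size.Uint64(),
        PrevTotal:      b.PrevTotal.Uint64(),
        ExtraData:      b.ExtraData,
        BlockHash:      b.BlockHash.Hex(),
        BlockNumber:    blockNumber,
        BlockTimestamp: blockTimestamp,
    }
}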
indexer/db/withdrawal.go (+51 -13)

@@ -17,6 +17,7 @@ type Withdrawal struct {
     Amount      *big.Int
     Data        []byte
     LogIndex    uint
+    BedrockHash *common.Hash
 }

 // String returns the tx hash for the withdrawal.

@@ -26,17 +27,54 @@ func (w Withdrawal) String() string {
 // WithdrawalJSON contains Withdrawal data suitable for JSON serialization.
 type WithdrawalJSON struct {
     GUID        string `json:"guid"`
     FromAddress string `json:"from"`
     ToAddress   string `json:"to"`
     L1Token     string `json:"l1Token"`
     L2Token     *Token `json:"l2Token"`
     Amount      string `json:"amount"`
     Data        []byte `json:"data"`
     LogIndex    uint64 `json:"logIndex"`
-    L1BlockNumber    uint64 `json:"l1BlockNumber"`
-    L1BlockTimestamp string `json:"l1BlockTimestamp"`
-    L2BlockNumber    uint64 `json:"l2BlockNumber"`
-    L2BlockTimestamp string `json:"l2BlockTimestamp"`
-    TxHash           string `json:"transactionHash"`
+    BlockNumber           uint64          `json:"blockNumber"`
+    BlockTimestamp        string          `json:"blockTimestamp"`
+    TxHash                string          `json:"transactionHash"`
+    Batch                 *StateBatchJSON `json:"batch"`
+    BedrockWithdrawalHash *string         `json:"bedrockWithdrawalHash"`
 }

+type FinalizationState int
+
+const (
+    FinalizationStateAny FinalizationState = iota
+    FinalizationStateFinalized
+    FinalizationStateUnfinalized
+)
+
+func ParseFinalizationState(in string) FinalizationState {
+    switch in {
+    case "true":
+        return FinalizationStateFinalized
+    case "false":
+        return FinalizationStateUnfinalized
+    default:
+        return FinalizationStateAny
+    }
+}
+
+func (f FinalizationState) SQL() string {
+    switch f {
+    case FinalizationStateFinalized:
+        return "AND withdrawals.l1_block_hash IS NOT NULL"
+    case FinalizationStateUnfinalized:
+        return "AND withdrawals.l2_block_hash IS NULL"
+    }
+
+    return ""
+}
+
+type FinalizedWithdrawal struct {
+    WithdrawalHash common.Hash
+    TxHash         common.Hash
+    Success        bool
+    LogIndex       uint
+}
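FinalizationState.SQL() returns a WHERE-clause fragment, or an empty string for the "any" state, which suggests it is meant to be appended to a withdrawals query built elsewhere in the package. A hypothetical sketch in the same db package; the base query text is illustrative only, not taken from the indexer:

// withdrawalsQuery is a hypothetical example of composing the finalization
// filter: "true", "false", or any other value for "no filter".
func withdrawalsQuery(finalized string) string {
    state := ParseFinalizationState(finalized)
    // state.SQL() is either empty or an "AND ..." fragment, so it can be
    // appended directly after the base WHERE clause.
    return "SELECT * FROM withdrawals WHERE from_address = $1 " + state.SQL()
}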
indexer/services/l1/service.go (+5 -6)

@@ -298,12 +298,11 @@ func (s *Service) Update(newHeader *types.Header) error {
         }

         block := &db.IndexedL1Block{
             Hash:       blockHash,
             ParentHash: header.ParentHash,
             Number:     number,
             Timestamp:  header.Time,
             Deposits:   deposits,
-            Withdrawals: withdrawals,
         }

         err := s.cfg.DB.AddIndexedL1Block(block)
indexer/services/l2/service.go (+1 -11)

@@ -286,7 +286,6 @@ func (s *Service) Update(newHeader *types.Header) error {
     for i, header := range headers {
         blockHash := header.Hash()
         number := header.Number.Uint64()
-        deposits := depositsByBlockHash[blockHash]
         withdrawals := withdrawalsByBlockHash[blockHash]

         if len(withdrawals) == 0 && i != len(headers)-1 {

@@ -298,7 +297,6 @@ func (s *Service) Update(newHeader *types.Header) error {
             ParentHash:  header.ParentHash,
             Number:      number,
             Timestamp:   header.Time,
-            Deposits:    deposits,
             Withdrawals: withdrawals,
         }

@@ -359,15 +357,7 @@ func (s *Service) GetIndexerStatus(w http.ResponseWriter, r *http.Request) {
 }

 func (s *Service) GetWithdrawalStatus(w http.ResponseWriter, r *http.Request) {
-    vars := mux.Vars(r)
-
-    withdrawal, err := s.cfg.DB.GetWithdrawalStatus(common.HexToHash(vars["hash"]))
-    if err != nil {
-        server.RespondWithError(w, http.StatusInternalServerError, err.Error())
-        return
-    }
-
-    server.RespondWithJSON(w, http.StatusOK, withdrawal)
+    // Temporary stub until rest of indexer is landed
 }

 func (s *Service) GetWithdrawals(w http.ResponseWriter, r *http.Request) {
op-e2e/actions/l2_sequencer.go (new file, 0 → 100644, +94 -0)

package actions

import (
    "github.com/ethereum-optimism/optimism/op-node/eth"
    "github.com/ethereum-optimism/optimism/op-node/rollup"
    "github.com/ethereum-optimism/optimism/op-node/rollup/derive"
    "github.com/ethereum-optimism/optimism/op-node/rollup/driver"

    "github.com/ethereum/go-ethereum/log"
    "github.com/stretchr/testify/require"
)

// L2Sequencer is an actor that functions like a rollup node,
// without the full P2P/API/Node stack, but just the derivation state, and simplified driver with sequencing ability.
type L2Sequencer struct {
    L2Verifier

    sequencer        *driver.Sequencer
    l1OriginSelector *driver.L1OriginSelector

    seqOldOrigin            bool  // stay on current L1 origin when sequencing a block, unless forced to adopt the next origin
    failL2GossipUnsafeBlock error // mock error
}

func NewL2Sequencer(log log.Logger, l1 derive.L1Fetcher, eng derive.Engine, cfg *rollup.Config, seqConfDepth uint64) *L2Sequencer {
    ver := NewL2Verifier(log, l1, eng, cfg)
    return &L2Sequencer{
        L2Verifier:              *ver,
        sequencer:               driver.NewSequencer(log, cfg, l1, eng),
        l1OriginSelector:        driver.NewL1OriginSelector(log, cfg, l1, seqConfDepth),
        seqOldOrigin:            false,
        failL2GossipUnsafeBlock: nil,
    }
}

// ActL2StartBlock starts building of a new L2 block on top of the head
func (s *L2Sequencer) ActL2StartBlock(t Testing) {
    if !s.l2PipelineIdle {
        t.InvalidAction("cannot start L2 build when derivation is not idle")
        return
    }
    if s.l2Building {
        t.InvalidAction("already started building L2 block")
        return
    }

    parent := s.derivation.UnsafeL2Head()
    var origin eth.L1BlockRef
    if s.seqOldOrigin {
        // force old origin, for testing purposes
        oldOrigin, err := s.l1.L1BlockRefByHash(t.Ctx(), parent.L1Origin.Hash)
        require.NoError(t, err, "failed to get current origin: %s", parent.L1Origin)
        origin = oldOrigin
        s.seqOldOrigin = false // don't repeat this
    } else {
        // select origin the real way
        l1Origin, err := s.l1OriginSelector.FindL1Origin(t.Ctx(), s.l1State.L1Head(), parent)
        require.NoError(t, err)
        origin = l1Origin
    }

    err := s.sequencer.StartBuildingBlock(t.Ctx(), parent, s.derivation.SafeL2Head().ID(), s.derivation.Finalized().ID(), origin)
    require.NoError(t, err, "failed to start block building")

    s.l2Building = true
}

// ActL2EndBlock completes a new L2 block and applies it to the L2 chain as new canonical unsafe head
func (s *L2Sequencer) ActL2EndBlock(t Testing) {
    if !s.l2Building {
        t.InvalidAction("cannot end L2 block building when no block is being built")
        return
    }
    s.l2Building = false

    payload, err := s.sequencer.CompleteBuildingBlock(t.Ctx())
    // TODO: there may be legitimate temporary errors here, if we mock engine API RPC-failure.
    // For advanced tests we can catch those and print a warning instead.
    require.NoError(t, err)

    ref, err := derive.PayloadToBlockRef(payload, &s.rollupCfg.Genesis)
    require.NoError(t, err, "payload must convert to block ref")
    s.derivation.SetUnsafeHead(ref)
    // TODO: action-test publishing of payload on p2p
}

// ActL2KeepL1Origin makes the sequencer use the current L1 origin, even if the next origin is available.
func (s *L2Sequencer) ActL2KeepL1Origin(t Testing) {
    if s.seqOldOrigin { // don't do this twice
        t.InvalidAction("already decided to keep old L1 origin")
        return
    }
    s.seqOldOrigin = true
}
op-e2e/actions/l2_sequencer_test.go (new file, 0 → 100644, +102 -0)

package actions

import (
    "math/big"
    "testing"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/core/types"
    "github.com/ethereum/go-ethereum/log"
    "github.com/ethereum/go-ethereum/params"
    "github.com/stretchr/testify/require"

    "github.com/ethereum-optimism/optimism/op-e2e/e2eutils"
    "github.com/ethereum-optimism/optimism/op-node/sources"
    "github.com/ethereum-optimism/optimism/op-node/testlog"
)

func setupSequencerTest(t Testing, sd *e2eutils.SetupData, log log.Logger) (*L1Miner, *L2Engine, *L2Sequencer) {
    jwtPath := e2eutils.WriteDefaultJWT(t)

    miner := NewL1Miner(log, sd.L1Cfg)

    l1F, err := sources.NewL1Client(miner.RPCClient(), log, nil, sources.L1ClientDefaultConfig(sd.RollupCfg, false))
    require.NoError(t, err)
    engine := NewL2Engine(log, sd.L2Cfg, sd.RollupCfg.Genesis.L1, jwtPath)
    l2Cl, err := sources.NewEngineClient(engine.RPCClient(), log, nil, sources.EngineClientDefaultConfig(sd.RollupCfg))
    require.NoError(t, err)

    sequencer := NewL2Sequencer(log, l1F, l2Cl, sd.RollupCfg, 0)
    return miner, engine, sequencer
}

func TestL2Sequencer_SequencerDrift(gt *testing.T) {
    t := NewDefaultTesting(gt)
    p := &e2eutils.TestParams{
        MaxSequencerDrift:   20, // larger than L1 block time we simulate in this test (12)
        SequencerWindowSize: 24,
        ChannelTimeout:      20,
    }
    dp := e2eutils.MakeDeployParams(t, p)
    sd := e2eutils.Setup(t, dp, defaultAlloc)
    log := testlog.Logger(t, log.LvlDebug)
    miner, engine, sequencer := setupSequencerTest(t, sd, log)
    miner.ActL1SetFeeRecipient(common.Address{'A'})

    sequencer.ActL2PipelineFull(t)

    signer := types.LatestSigner(sd.L2Cfg.Config)
    cl := engine.EthClient()
    aliceTx := func() {
        n, err := cl.PendingNonceAt(t.Ctx(), dp.Addresses.Alice)
        require.NoError(t, err)
        tx := types.MustSignNewTx(dp.Secrets.Alice, signer, &types.DynamicFeeTx{
            ChainID:   sd.L2Cfg.Config.ChainID,
            Nonce:     n,
            GasTipCap: big.NewInt(2 * params.GWei),
            GasFeeCap: new(big.Int).Add(miner.l1Chain.CurrentBlock().BaseFee(), big.NewInt(2*params.GWei)),
            Gas:       params.TxGas,
            To:        &dp.Addresses.Bob,
            Value:     e2eutils.Ether(2),
        })
        require.NoError(gt, cl.SendTransaction(t.Ctx(), tx))
    }
    makeL2BlockWithAliceTx := func() {
        aliceTx()
        sequencer.ActL2StartBlock(t)
        engine.ActL2IncludeTx(dp.Addresses.Alice)(t) // include a test tx from alice
        sequencer.ActL2EndBlock(t)
    }

    // L1 makes a block
    miner.ActL1StartBlock(12)(t)
    miner.ActL1EndBlock(t)
    sequencer.ActL1HeadSignal(t)
    origin := miner.l1Chain.CurrentBlock()

    // L2 makes blocks to catch up
    for sequencer.SyncStatus().UnsafeL2.Time+sd.RollupCfg.BlockTime < origin.Time() {
        makeL2BlockWithAliceTx()
        require.Equal(t, uint64(0), sequencer.SyncStatus().UnsafeL2.L1Origin.Number, "no L1 origin change before time matches")
    }

    // Check that we adopted the origin as soon as we could (conf depth is 0)
    makeL2BlockWithAliceTx()
    require.Equal(t, uint64(1), sequencer.SyncStatus().UnsafeL2.L1Origin.Number, "L1 origin changes as soon as L2 time equals or exceeds L1 time")

    miner.ActL1StartBlock(12)(t)
    miner.ActL1EndBlock(t)
    sequencer.ActL1HeadSignal(t)

    // Make blocks up till the sequencer drift is about to surpass, but keep the old L1 origin
    for sequencer.SyncStatus().UnsafeL2.Time+sd.RollupCfg.BlockTime < origin.Time()+sd.RollupCfg.MaxSequencerDrift {
        sequencer.ActL2KeepL1Origin(t)
        makeL2BlockWithAliceTx()
        require.Equal(t, uint64(1), sequencer.SyncStatus().UnsafeL2.L1Origin.Number, "expected to keep old L1 origin")
    }

    // We passed the sequencer drift: we can still keep the old origin, but can't include any txs
    sequencer.ActL2KeepL1Origin(t)
    sequencer.ActL2StartBlock(t)
    require.True(t, engine.l2ForceEmpty, "engine should not be allowed to include anything after sequencer drift is surpassed")
}
op-e2e/actions/l2_verifier.go (+29 -8)

@@ -4,6 +4,9 @@ import (
     "errors"
     "io"

+    "github.com/ethereum-optimism/optimism/op-node/rollup/driver"
+    "github.com/stretchr/testify/require"
+
     "github.com/ethereum/go-ethereum/log"

     "github.com/ethereum-optimism/optimism/op-node/eth"

@@ -22,9 +25,8 @@ type L2Verifier struct {
     // L2 rollup
     derivation *derive.DerivationPipeline

-    l1Head      eth.L1BlockRef
-    l1Safe      eth.L1BlockRef
-    l1Finalized eth.L1BlockRef
+    l1      derive.L1Fetcher
+    l1State *driver.L1State

     l2PipelineIdle bool
     l2Building     bool

@@ -33,12 +35,15 @@
 }

 func NewL2Verifier(log log.Logger, l1 derive.L1Fetcher, eng derive.Engine, cfg *rollup.Config) *L2Verifier {
-    pipeline := derive.NewDerivationPipeline(log, cfg, l1, eng, &testutils.TestDerivationMetrics{})
+    metrics := &testutils.TestDerivationMetrics{}
+    pipeline := derive.NewDerivationPipeline(log, cfg, l1, eng, metrics)
     pipeline.Reset()
     return &L2Verifier{
         log:            log,
         eng:            eng,
         derivation:     pipeline,
+        l1:             l1,
+        l1State:        driver.NewL1State(log, metrics),
         l2PipelineIdle: true,
         l2Building:     false,
         rollupCfg:      cfg,

@@ -48,16 +53,32 @@ func NewL2Verifier(log log.Logger, l1 derive.L1Fetcher, eng derive.Engine, cfg *rollup.Config) *L2Verifier {
 func (s *L2Verifier) SyncStatus() *eth.SyncStatus {
     return &eth.SyncStatus{
         CurrentL1:   s.derivation.Origin(),
-        HeadL1:      s.l1Head,
-        SafeL1:      s.l1Safe,
-        FinalizedL1: s.l1Finalized,
+        HeadL1:      s.l1State.L1Head(),
+        SafeL1:      s.l1State.L1Safe(),
+        FinalizedL1: s.l1State.L1Finalized(),
         UnsafeL2:    s.derivation.UnsafeL2Head(),
         SafeL2:      s.derivation.SafeL2Head(),
         FinalizedL2: s.derivation.Finalized(),
     }
 }

-// TODO: actions to change L1 head/safe/finalized state. Depends on driver refactor work.
+func (s *L2Verifier) ActL1HeadSignal(t Testing) {
+    head, err := s.l1.L1BlockRefByLabel(t.Ctx(), eth.Unsafe)
+    require.NoError(t, err)
+    s.l1State.HandleNewL1HeadBlock(head)
+}
+
+func (s *L2Verifier) ActL1SafeSignal(t Testing) {
+    head, err := s.l1.L1BlockRefByLabel(t.Ctx(), eth.Safe)
+    require.NoError(t, err)
+    s.l1State.HandleNewL1SafeBlock(head)
+}
+
+func (s *L2Verifier) ActL1FinalizedSignal(t Testing) {
+    head, err := s.l1.L1BlockRefByLabel(t.Ctx(), eth.Finalized)
+    require.NoError(t, err)
+    s.l1State.HandleNewL1FinalizedBlock(head)
+}

 // ActL2PipelineStep runs one iteration of the L2 derivation pipeline
 func (s *L2Verifier) ActL2PipelineStep(t Testing) {
op-node/testutils/metrics.go (+7 -0)

@@ -5,11 +5,18 @@ import "github.com/ethereum-optimism/optimism/op-node/eth"
 // TestDerivationMetrics implements the metrics used in the derivation pipeline as no-op operations.
 // Optionally a test may hook into the metrics
 type TestDerivationMetrics struct {
+    FnRecordL1ReorgDepth   func(d uint64)
     FnRecordL1Ref          func(name string, ref eth.L1BlockRef)
     FnRecordL2Ref          func(name string, ref eth.L2BlockRef)
     FnRecordUnsafePayloads func(length uint64, memSize uint64, next eth.BlockID)
 }

+func (t *TestDerivationMetrics) RecordL1ReorgDepth(d uint64) {
+    if t.FnRecordL1ReorgDepth != nil {
+        t.FnRecordL1ReorgDepth(d)
+    }
+}
+
 func (t *TestDerivationMetrics) RecordL1Ref(name string, ref eth.L1BlockRef) {
     if t.FnRecordL1Ref != nil {
         t.FnRecordL1Ref(name, ref)
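The new FnRecordL1ReorgDepth hook mirrors the existing ones: the no-op metrics stay silent unless a test installs a callback. A short sketch of a test, not part of the diff, that uses the hook as a probe:

package testutils_test

import (
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/ethereum-optimism/optimism/op-node/testutils"
)

// TestRecordL1ReorgDepthHook is a sketch showing how the new hook turns the
// no-op metrics into a probe that captures recorded reorg depths.
func TestRecordL1ReorgDepthHook(t *testing.T) {
    var depths []uint64
    m := &testutils.TestDerivationMetrics{
        FnRecordL1ReorgDepth: func(d uint64) { depths = append(depths, d) },
    }
    m.RecordL1ReorgDepth(3)
    m.RecordL1ReorgDepth(7)
    require.Equal(t, []uint64{3, 7}, depths)
}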
ops/docker/ci-builder/Dockerfile (+1 -8)

@@ -48,19 +48,12 @@ RUN apt-get update && \
     apt-get install -y nodejs && \
     npm i -g yarn && \
     npm i -g depcheck && \
+    pip install slither-analyzer==0.9.0 && \
     go install gotest.tools/gotestsum@latest && \
     curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.48.0 && \
     curl -fLSs https://raw.githubusercontent.com/CircleCI-Public/circleci-cli/master/install.sh | bash && \
     chmod +x /usr/local/bin/check-changed

-# Install a specific version of slither. The current release does not work with our
-# forge test contracts.
-WORKDIR /opt
-RUN git clone https://github.com/crytic/slither && \
-    cd slither && \
-    git checkout 90d13cd3883404a86ef4b3dd6af4d5c234e69a54 && \
-    python3 setup.py install
-
 RUN echo "downloading solidity compilers" && \
     curl -o solc-linux-amd64-v0.5.17+commit.d19bba13 -sL https://binaries.soliditylang.org/linux-amd64/solc-linux-amd64-v0.5.17+commit.d19bba13 && \
     curl -o solc-linux-amd64-v0.8.9+commit.e5eed63a -sL https://binaries.soliditylang.org/linux-amd64/solc-linux-amd64-v0.8.9+commit.e5eed63a && \
packages/contracts/deployments/goerli/README.md (+2 -3)

@@ -90,8 +90,8 @@ Proxy__OVM_L1StandardBridge
 StateCommitmentChain
 </td>
 <td align="center">
-<a href="https://goerli.etherscan.io/address/0x72281826E90dD8A65Ab686fF254eb45Be426DD22">
-<code>0x72281826E90dD8A65Ab686fF254eb45Be426DD22</code>
+<a href="https://goerli.etherscan.io/address/0x9c945aC97Baf48cB784AbBB61399beB71aF7A378">
+<code>0x9c945aC97Baf48cB784AbBB61399beB71aF7A378</code>
 </a>
 </td>
 </tr>

@@ -190,4 +190,3 @@ WETH9
 </td>
 </tr>
 </table>
-
packages/sdk/src/interfaces/types.ts (+1 -0)

@@ -30,6 +30,7 @@ export enum L2ChainID {
   OPTIMISM_HARDHAT_LOCAL = 31337,
   OPTIMISM_HARDHAT_DEVNET = 17,
   OPTIMISM_BEDROCK_LOCAL_DEVNET = 901,
+  OPTIMISM_BEDROCK_ALPHA_TESTNET = 28528,
 }

 /**
packages/sdk/src/utils/chain-constants.ts (+17 -0)

@@ -23,6 +23,7 @@ export const DEPOSIT_CONFIRMATION_BLOCKS: {
   [L2ChainID.OPTIMISM_HARDHAT_LOCAL]: 2 as const,
   [L2ChainID.OPTIMISM_HARDHAT_DEVNET]: 2 as const,
   [L2ChainID.OPTIMISM_BEDROCK_LOCAL_DEVNET]: 2 as const,
+  [L2ChainID.OPTIMISM_BEDROCK_ALPHA_TESTNET]: 12 as const,
 }

 export const CHAIN_BLOCK_TIMES: {

@@ -146,6 +147,22 @@ export const CONTRACT_ADDRESSES: {
     },
     l2: DEFAULT_L2_CONTRACT_ADDRESSES,
   },
+  [L2ChainID.OPTIMISM_BEDROCK_ALPHA_TESTNET]: {
+    l1: {
+      AddressManager: '0xb4e08DcE1F323608229265c9d4125E22a4B9dbAF' as const,
+      L1CrossDomainMessenger:
+        '0x838a6DC4E37CA45D4Ef05bb776bf05eEf50798De' as const,
+      L1StandardBridge: '0xFf94B6C486350aD92561Ba09bad3a59df764Da92' as const,
+      StateCommitmentChain:
+        '0x0000000000000000000000000000000000000000' as const,
+      CanonicalTransactionChain:
+        '0x0000000000000000000000000000000000000000' as const,
+      BondManager: '0x0000000000000000000000000000000000000000' as const,
+      OptimismPortal: '0xA581Ca3353DB73115C4625FFC7aDF5dB379434A8' as const,
+      L2OutputOracle: '0x3A234299a14De50027eA65dCdf1c0DaC729e04A6' as const,
+    },
+    l2: DEFAULT_L2_CONTRACT_ADDRESSES,
+  },
 }

 /**