Commit bed6c08f authored by mergify[bot], committed by GitHub

Merge branch 'develop' into refcell/load-dispute-games

parents 4eab3291 6adc4546
---
'@eth-optimism/core-utils': patch
---
Upgraded npm dependencies to latest
---
'@eth-optimism/chain-mon': patch
---
Upgraded npm dependencies to latest
...@@ -115,20 +115,23 @@ jobs: ...@@ -115,20 +115,23 @@ jobs:
name: Restore PNPM Package Cache name: Restore PNPM Package Cache
keys: keys:
- pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }} - pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }}
- run: # Fetch node_modules into the pnpm store
name: Install dependencies # This will cache node_modules based on pnpm-lock so other steps can instantly install them with `pnpm install --prefer-offline`
command: pnpm install --frozen-lockfile # --prefer-offline installs node_modules instantly by just reading from cache if it exists rather than fetching from network
# when installing node_modules pnpm simply adds symlinks instead of copying the files which is why it is pretty much instant to run --prefer-offline
# this allows a caching strategy of only checking pnpm-lockfile so we don't have to keep it in sync with our packages
# For more information see https://pnpm.io/cli/fetch
- run:
name: Fetch dependencies
command: pnpm fetch --frozen-lockfile --prefer-offline
- save_cache: - save_cache:
name: Save PNPM Package Cache name: Save PNPM Package Cache
key: pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }} key: pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }}
paths: paths:
- "node_modules" - "node_modules"
- "packages/chain-mon/node_modules" - run:
- "packages/common-ts/node_modules" name: Install dependencies
- "packages/contracts-bedrock/node_modules" command: pnpm install --frozen-lockfile --offline
- "packages/core-utils/node_modules"
- "packages/sdk/node_modules"
- "packages/contracts-ts/node_modules"
- run: - run:
name: print forge version name: print forge version
command: forge --version command: forge --version
...@@ -469,6 +472,10 @@ jobs: ...@@ -469,6 +472,10 @@ jobs:
name: Restore PNPM Package Cache name: Restore PNPM Package Cache
keys: keys:
- pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }} - pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }}
# populate node modules from the cache
- run:
name: Install dependencies
command: pnpm install --frozen-lockfile --prefer-offline
- check-changed: - check-changed:
patterns: contracts-bedrock patterns: contracts-bedrock
- run: - run:
...@@ -476,51 +483,6 @@ jobs: ...@@ -476,51 +483,6 @@ jobs:
command: pnpm validate-spacers command: pnpm validate-spacers
working_directory: packages/contracts-bedrock working_directory: packages/contracts-bedrock
bedrock-echidna-build:
docker:
- image: us-docker.pkg.dev/oplabs-tools-artifacts/images/ci-builder:latest
steps:
- checkout
- attach_workspace: { at: "." }
- check-changed:
patterns: contracts-bedrock
- run:
name: Compile with metadata hash
command: pnpm clean && pnpm build:with-metadata
working_directory: packages/contracts-bedrock
- persist_to_workspace:
root: .
paths:
- "node_modules"
- packages/contracts-bedrock
bedrock-echidna-run:
docker:
- image: us-docker.pkg.dev/oplabs-tools-artifacts/images/ci-builder:latest
parameters:
echidna_target:
description: Which echidna fuzz contract to run
type: string
size:
description: Custom resource class size for the run
type: string
default: large
resource_class: <<parameters.size>>
steps:
- checkout
- attach_workspace: { at: "." }
- restore_cache:
name: Restore PNPM Package Cache
keys:
- pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }}
- check-changed:
patterns: contracts-bedrock
- run:
name: Echidna Fuzz <<parameters.echidna_target>>
command: pnpm echidna:<<parameters.echidna_target>>
working_directory: packages/contracts-bedrock
no_output_timeout: 15m
op-bindings-build: op-bindings-build:
docker: docker:
- image: us-docker.pkg.dev/oplabs-tools-artifacts/images/ci-builder:latest - image: us-docker.pkg.dev/oplabs-tools-artifacts/images/ci-builder:latest
...@@ -556,6 +518,10 @@ jobs: ...@@ -556,6 +518,10 @@ jobs:
- pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }} - pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }}
- check-changed: - check-changed:
patterns: <<parameters.package_name>>,<<parameters.dependencies>> patterns: <<parameters.package_name>>,<<parameters.dependencies>>
# populate node modules from the cache
- run:
name: Install dependencies
command: pnpm install --frozen-lockfile --prefer-offline
- run: - run:
name: Lint name: Lint
command: pnpm lint && git diff --exit-code command: pnpm lint && git diff --exit-code
...@@ -581,6 +547,10 @@ jobs: ...@@ -581,6 +547,10 @@ jobs:
- pnpm-packages-v2-{{ checksum "pnpm.lock.yaml" }} - pnpm-packages-v2-{{ checksum "pnpm.lock.yaml" }}
- check-changed: - check-changed:
patterns: sdk,contracts-bedrock,contracts patterns: sdk,contracts-bedrock,contracts
# populate node modules from the cache
- run:
name: Install dependencies
command: pnpm install --frozen-lockfile --prefer-offline
- run: - run:
name: Check generated and build name: Check generated and build
command: pnpm generate:check command: pnpm generate:check
...@@ -599,6 +569,10 @@ jobs: ...@@ -599,6 +569,10 @@ jobs:
- pnpm-packages-v2-{{ checksum "pnpm.lock.yaml" }} - pnpm-packages-v2-{{ checksum "pnpm.lock.yaml" }}
- check-changed: - check-changed:
patterns: sdk,contracts-bedrock,contracts patterns: sdk,contracts-bedrock,contracts
# populate node modules from the cache
- run:
name: Install dependencies
command: pnpm install --frozen-lockfile --prefer-offline
- run: - run:
name: anvil-l1 name: anvil-l1
background: true background: true
...@@ -643,22 +617,13 @@ jobs: ...@@ -643,22 +617,13 @@ jobs:
- checkout - checkout
- check-changed: - check-changed:
patterns: specs/(.*)\.md$ patterns: specs/(.*)\.md$
# TODO remove me after ci builder updated
- run: - run:
name: Install pnpm package manager name: Install pnpm package manager
command: | command: |
npm i pnpm --global npm i pnpm --global
# TODO remove me after ci builder updated
# A github dep clones-with-immutable-args is installed via github
# packages installed via npm via github automatically run postpack scripts
# their postpack script happens to use yarn so we need it here
- run:
name: Install yarn package manager
command: |
npm i yarn@1 --global
- run: - run:
name: pnpm dev deps name: pnpm dev deps
command: pnpm install command: pnpm install --frozen-lockfile --prefer-offline
- run: - run:
name: specs toc name: specs toc
command: pnpm lint:specs:toc && git diff --exit-code ./specs command: pnpm lint:specs:toc && git diff --exit-code ./specs
...@@ -671,7 +636,6 @@ jobs: ...@@ -671,7 +636,6 @@ jobs:
image: ubuntu-2204:2022.07.1 image: ubuntu-2204:2022.07.1
steps: steps:
- checkout - checkout
# TODO remove me after ci builder updated
- run: - run:
name: Install pnpm package manager name: Install pnpm package manager
command: | command: |
...@@ -679,7 +643,7 @@ jobs: ...@@ -679,7 +643,7 @@ jobs:
- run: - run:
name: Install node_modules name: Install node_modules
command: | command: |
pnpm install pnpm install --frozen-lockfile --prefer-offline
- run: - run:
name: Lint check name: Lint check
command: | command: |
...@@ -981,7 +945,7 @@ jobs: ...@@ -981,7 +945,7 @@ jobs:
- run: - run:
name: Install and build name: Install and build
command: | command: |
pnpm install && pnpm build pnpm install --frozen-lockfile --prefer-offline && pnpm build
- run: - run:
name: generate cannon prestate name: generate cannon prestate
command: make cannon-prestate command: make cannon-prestate
......
...@@ -42,7 +42,7 @@ jobs: ...@@ -42,7 +42,7 @@ jobs:
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Fetch tags - name: Fetch tags
run: git fetch --tags origin run: git fetch --tags origin --force
- name: Setup Python 3.10 - name: Setup Python 3.10
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
......
...@@ -185,3 +185,37 @@ cd packages/contracts ...@@ -185,3 +185,37 @@ cd packages/contracts
pip3 install slither-analyzer pip3 install slither-analyzer
pnpm test:slither pnpm test:slither
``` ```
## Labels
Labels are divided into categories with their descriptions annotated as `<Category Name>: <description>`.
The following is a comprehensive list of label categories.
- **Area labels** ([`A-`][area]): Denote the general area for the related issue or PR changes.
- **Category labels** ([`C-`][category]): Contextualize the type of issue or change.
- **Meta labels** ([`M-`][meta]): Add context to issues or PRs themselves, primarily relating to process.
- **Difficulty labels** ([`D-`][difficulty]): Describe the associated implementation's difficulty level.
- **Status labels** ([`S-`][status]): Specify the status of an issue or PR.
Labels also provide a versatile filter for finding tickets that need help or are open for assignment.
This makes them a great tool for contributors!
[area]: https://github.com/ethereum-optimism/optimism/labels?q=a-
[category]: https://github.com/ethereum-optimism/optimism/labels?q=c-
[meta]: https://github.com/ethereum-optimism/optimism/labels?q=m-
[difficulty]: https://github.com/ethereum-optimism/optimism/labels?q=d-
[status]: https://github.com/ethereum-optimism/optimism/labels?q=s-
#### Filtering for Work
To find tickets available for external contribution, take a look at the [`M-community`][M-community] label.
You can filter by the [`D-good-first-issue`][D-good-first-issue]
label to find issues that are intended to be easy to implement or fix.
All labels can also be seen by visiting the [labels page][labels].
[labels]: https://github.com/ethereum-optimism/optimism/labels
[M-community]: https://github.com/ethereum-optimism/optimism/labels/M-community
[D-good-first-issue]: https://github.com/ethereum-optimism/optimism/labels/D-good-first-issue
...@@ -11,6 +11,7 @@ require ( ...@@ -11,6 +11,7 @@ require (
github.com/ethereum/go-ethereum v1.12.0 github.com/ethereum/go-ethereum v1.12.0
github.com/fsnotify/fsnotify v1.6.0 github.com/fsnotify/fsnotify v1.6.0
github.com/go-chi/chi/v5 v5.0.10 github.com/go-chi/chi/v5 v5.0.10
github.com/go-chi/docgen v1.2.0
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb
github.com/google/go-cmp v0.5.9 github.com/google/go-cmp v0.5.9
github.com/google/gofuzz v1.2.1-0.20220503160820-4a35382e8fc8 github.com/google/gofuzz v1.2.1-0.20220503160820-4a35382e8fc8
......
...@@ -192,8 +192,12 @@ github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8= ...@@ -192,8 +192,12 @@ github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8=
github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98=
github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs= github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs=
github.com/go-chi/chi/v5 v5.0.1/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk=
github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-chi/docgen v1.2.0 h1:da0Nq2PKU9W9pSOTUfVrKI1vIgTGpauo9cfh4Iwivek=
github.com/go-chi/docgen v1.2.0/go.mod h1:G9W0G551cs2BFMSn/cnGwX+JBHEloAgo17MBhyrnhPI=
github.com/go-chi/render v1.0.1/go.mod h1:pq4Rr7HbnsdaeHagklXub+p6Wd16Af5l9koip1OvJns=
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
......
...@@ -21,7 +21,7 @@ func NewApi(bv database.BridgeTransfersView, logger log.Logger) *Api { ...@@ -21,7 +21,7 @@ func NewApi(bv database.BridgeTransfersView, logger log.Logger) *Api {
r := chi.NewRouter() r := chi.NewRouter()
h := routes.NewRoutes(logger, bv) h := routes.NewRoutes(logger, bv, r)
api := &Api{Router: r} api := &Api{Router: r}
......
...@@ -2,7 +2,6 @@ package api ...@@ -2,7 +2,6 @@ package api
import ( import (
"fmt" "fmt"
"math/big"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"testing" "testing"
...@@ -21,17 +20,21 @@ var mockAddress = "0x4204204204204204204204204204204204204204" ...@@ -21,17 +20,21 @@ var mockAddress = "0x4204204204204204204204204204204204204204"
var ( var (
deposit = database.L1BridgeDeposit{ deposit = database.L1BridgeDeposit{
TransactionSourceHash: common.HexToHash("abc"), TransactionSourceHash: common.HexToHash("abc"),
CrossDomainMessengerNonce: &database.U256{Int: big.NewInt(0)}, BridgeTransfer: database.BridgeTransfer{
Tx: database.Transaction{}, CrossDomainMessageHash: &common.Hash{},
TokenPair: database.TokenPair{}, Tx: database.Transaction{},
TokenPair: database.TokenPair{},
},
} }
withdrawal = database.L2BridgeWithdrawal{ withdrawal = database.L2BridgeWithdrawal{
TransactionWithdrawalHash: common.HexToHash("0x420"), TransactionWithdrawalHash: common.HexToHash("0x420"),
CrossDomainMessengerNonce: &database.U256{Int: big.NewInt(0)}, BridgeTransfer: database.BridgeTransfer{
Tx: database.Transaction{}, CrossDomainMessageHash: &common.Hash{},
TokenPair: database.TokenPair{}, Tx: database.Transaction{},
TokenPair: database.TokenPair{},
},
} }
) )
...@@ -39,7 +42,7 @@ func (mbv *MockBridgeTransfersView) L1BridgeDeposit(hash common.Hash) (*database ...@@ -39,7 +42,7 @@ func (mbv *MockBridgeTransfersView) L1BridgeDeposit(hash common.Hash) (*database
return &deposit, nil return &deposit, nil
} }
func (mbv *MockBridgeTransfersView) L1BridgeDepositByCrossDomainMessengerNonce(nonce *big.Int) (*database.L1BridgeDeposit, error) { func (mbv *MockBridgeTransfersView) L1BridgeDepositWithFilter(filter database.BridgeTransfer) (*database.L1BridgeDeposit, error) {
return &deposit, nil return &deposit, nil
} }
...@@ -56,7 +59,7 @@ func (mbv *MockBridgeTransfersView) L2BridgeWithdrawal(address common.Hash) (*da ...@@ -56,7 +59,7 @@ func (mbv *MockBridgeTransfersView) L2BridgeWithdrawal(address common.Hash) (*da
return &withdrawal, nil return &withdrawal, nil
} }
func (mbv *MockBridgeTransfersView) L2BridgeWithdrawalByCrossDomainMessengerNonce(nonce *big.Int) (*database.L2BridgeWithdrawal, error) { func (mbv *MockBridgeTransfersView) L2BridgeWithdrawalWithFilter(filter database.BridgeTransfer) (*database.L2BridgeWithdrawal, error) {
return &withdrawal, nil return &withdrawal, nil
} }
......
...@@ -48,7 +48,7 @@ func newDepositResponse(deposits []*database.L1BridgeDepositWithTransactionHashe ...@@ -48,7 +48,7 @@ func newDepositResponse(deposits []*database.L1BridgeDepositWithTransactionHashe
Amount: deposit.L1BridgeDeposit.Tx.Amount.Int.String(), Amount: deposit.L1BridgeDeposit.Tx.Amount.Int.String(),
L1Token: TokenInfo{ L1Token: TokenInfo{
ChainId: 1, ChainId: 1,
Address: deposit.L1BridgeDeposit.TokenPair.L1TokenAddress.String(), Address: deposit.L1BridgeDeposit.TokenPair.LocalTokenAddress.String(),
Name: "TODO", Name: "TODO",
Symbol: "TODO", Symbol: "TODO",
Decimals: 420, Decimals: 420,
...@@ -58,7 +58,7 @@ func newDepositResponse(deposits []*database.L1BridgeDepositWithTransactionHashe ...@@ -58,7 +58,7 @@ func newDepositResponse(deposits []*database.L1BridgeDepositWithTransactionHashe
}, },
L2Token: TokenInfo{ L2Token: TokenInfo{
ChainId: 10, ChainId: 10,
Address: deposit.L1BridgeDeposit.TokenPair.L2TokenAddress.String(), Address: deposit.L1BridgeDeposit.TokenPair.RemoteTokenAddress.String(),
Name: "TODO", Name: "TODO",
Symbol: "TODO", Symbol: "TODO",
Decimals: 420, Decimals: 420,
......
package routes
import (
"net/http"
"github.com/go-chi/docgen"
)
func (h Routes) DocsHandler(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
w.WriteHeader(http.StatusOK)
docs := docgen.MarkdownRoutesDoc(h.Router, docgen.MarkdownOpts{
ProjectPath: "github.com/ethereum-optimism/optimism/indexer",
// Intro text included at the top of the generated markdown file.
Intro: "Generated documentation for Optimism indexer",
})
_, err := w.Write([]byte(docs))
if err != nil {
h.Logger.Error("error writing docs", "err", err)
http.Error(w, "Internal server error fetching docs", http.StatusInternalServerError)
}
}
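The new handler only renders the docs; it still has to be mounted on the router that `NewRoutes` now receives. A minimal wiring sketch, assuming a `/docs` route path (the path and the `newRouterWithDocs` helper below are illustrative, not taken from this change):

```go
package api

import (
	"github.com/ethereum-optimism/optimism/indexer/api/routes"
	"github.com/ethereum-optimism/optimism/indexer/database"
	"github.com/ethereum/go-ethereum/log"
	"github.com/go-chi/chi/v5"
)

// newRouterWithDocs is an illustrative helper: it builds the chi router,
// hands it to the routes so docgen can introspect it, and exposes the
// generated markdown at an assumed /docs path.
func newRouterWithDocs(logger log.Logger, bv database.BridgeTransfersView) *chi.Mux {
	r := chi.NewRouter()
	h := routes.NewRoutes(logger, bv, r) // routes now carry the router (see NewRoutes change below)
	r.Get("/docs", h.DocsHandler)        // assumed path; serves the docgen.MarkdownRoutesDoc output
	return r
}
```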
...@@ -3,16 +3,19 @@ package routes ...@@ -3,16 +3,19 @@ package routes
import ( import (
"github.com/ethereum-optimism/optimism/indexer/database" "github.com/ethereum-optimism/optimism/indexer/database"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
"github.com/go-chi/chi/v5"
) )
type Routes struct { type Routes struct {
Logger log.Logger Logger log.Logger
BridgeTransfersView database.BridgeTransfersView BridgeTransfersView database.BridgeTransfersView
Router *chi.Mux
} }
func NewRoutes(logger log.Logger, bv database.BridgeTransfersView) Routes { func NewRoutes(logger log.Logger, bv database.BridgeTransfersView, r *chi.Mux) Routes {
return Routes{ return Routes{
Logger: logger, Logger: logger,
BridgeTransfersView: bv, BridgeTransfersView: bv,
Router: r,
} }
} }
...@@ -73,7 +73,7 @@ func newWithdrawalResponse(withdrawals []*database.L2BridgeWithdrawalWithTransac ...@@ -73,7 +73,7 @@ func newWithdrawalResponse(withdrawals []*database.L2BridgeWithdrawalWithTransac
WithdrawalState: "COMPLETE", // TODO WithdrawalState: "COMPLETE", // TODO
L1Token: TokenInfo{ L1Token: TokenInfo{
ChainId: 1, ChainId: 1,
Address: withdrawal.L2BridgeWithdrawal.TokenPair.L1TokenAddress.String(), Address: withdrawal.L2BridgeWithdrawal.TokenPair.RemoteTokenAddress.String(),
Name: "Example", // TODO Name: "Example", // TODO
Symbol: "EXAMPLE", // TODO Symbol: "EXAMPLE", // TODO
Decimals: 18, // TODO Decimals: 18, // TODO
...@@ -83,7 +83,7 @@ func newWithdrawalResponse(withdrawals []*database.L2BridgeWithdrawalWithTransac ...@@ -83,7 +83,7 @@ func newWithdrawalResponse(withdrawals []*database.L2BridgeWithdrawalWithTransac
}, },
L2Token: TokenInfo{ L2Token: TokenInfo{
ChainId: 10, ChainId: 10,
Address: withdrawal.L2BridgeWithdrawal.TokenPair.L2TokenAddress.String(), Address: withdrawal.L2BridgeWithdrawal.TokenPair.LocalTokenAddress.String(),
Name: "Example", // TODO Name: "Example", // TODO
Symbol: "EXAMPLE", // TODO Symbol: "EXAMPLE", // TODO
Decimals: 18, // TODO Decimals: 18, // TODO
......
...@@ -25,11 +25,7 @@ type Cli struct { ...@@ -25,11 +25,7 @@ type Cli struct {
} }
func runIndexer(ctx *cli.Context) error { func runIndexer(ctx *cli.Context) error {
logger := log.NewLogger(log.CLIConfig{ logger := log.NewLogger(log.ReadCLIConfig(ctx))
Level: "warn",
Color: false,
Format: "terminal",
})
configPath := ctx.String(ConfigFlag.Name) configPath := ctx.String(ConfigFlag.Name)
cfg, err := config.LoadConfig(logger, configPath) cfg, err := config.LoadConfig(logger, configPath)
...@@ -38,8 +34,6 @@ func runIndexer(ctx *cli.Context) error { ...@@ -38,8 +34,6 @@ func runIndexer(ctx *cli.Context) error {
return err return err
} }
logger = log.NewLogger(cfg.Logger)
db, err := database.NewDB(cfg.DB) db, err := database.NewDB(cfg.DB)
if err != nil { if err != nil {
...@@ -99,6 +93,7 @@ func (c *Cli) Run(args []string) error { ...@@ -99,6 +93,7 @@ func (c *Cli) Run(args []string) error {
func NewCli(GitVersion string, GitCommit string, GitDate string) *Cli { func NewCli(GitVersion string, GitCommit string, GitDate string) *Cli {
flags := []cli.Flag{ConfigFlag} flags := []cli.Flag{ConfigFlag}
flags = append(flags, log.CLIFlags("INDEXER")...)
app := &cli.App{ app := &cli.App{
Version: fmt.Sprintf("%s-%s", GitVersion, params.VersionWithCommit(GitCommit, GitDate)), Version: fmt.Sprintf("%s-%s", GitVersion, params.VersionWithCommit(GitCommit, GitDate)),
Description: "An indexer of all optimism events with a serving api layer", Description: "An indexer of all optimism events with a serving api layer",
......
...@@ -6,8 +6,6 @@ import ( ...@@ -6,8 +6,6 @@ import (
"reflect" "reflect"
"github.com/BurntSushi/toml" "github.com/BurntSushi/toml"
"github.com/ethereum-optimism/optimism/op-service/log"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
geth_log "github.com/ethereum/go-ethereum/log" geth_log "github.com/ethereum/go-ethereum/log"
"github.com/joho/godotenv" "github.com/joho/godotenv"
...@@ -22,7 +20,6 @@ type Config struct { ...@@ -22,7 +20,6 @@ type Config struct {
DB DBConfig DB DBConfig
API APIConfig API APIConfig
Metrics MetricsConfig Metrics MetricsConfig
Logger log.CLIConfig
} }
// fetch this via onchain config from RPCsConfig and remove from config in future // fetch this via onchain config from RPCsConfig and remove from config in future
......
...@@ -3,7 +3,6 @@ package database ...@@ -3,7 +3,6 @@ package database
import ( import (
"errors" "errors"
"fmt" "fmt"
"math/big"
"gorm.io/gorm" "gorm.io/gorm"
...@@ -17,8 +16,8 @@ import ( ...@@ -17,8 +16,8 @@ import (
*/ */
type BridgeMessage struct { type BridgeMessage struct {
Nonce U256 `gorm:"primaryKey"` MessageHash common.Hash `gorm:"primaryKey;serializer:json"`
MessageHash common.Hash `gorm:"serializer:json"` Nonce U256
SentMessageEventGUID uuid.UUID SentMessageEventGUID uuid.UUID
RelayedMessageEventGUID *uuid.UUID RelayedMessageEventGUID *uuid.UUID
...@@ -38,13 +37,11 @@ type L2BridgeMessage struct { ...@@ -38,13 +37,11 @@ type L2BridgeMessage struct {
} }
type BridgeMessagesView interface { type BridgeMessagesView interface {
L1BridgeMessage(*big.Int) (*L1BridgeMessage, error) L1BridgeMessage(common.Hash) (*L1BridgeMessage, error)
L1BridgeMessageByHash(common.Hash) (*L1BridgeMessage, error) L1BridgeMessageWithFilter(BridgeMessage) (*L1BridgeMessage, error)
LatestL1BridgeMessageNonce() (*big.Int, error)
L2BridgeMessage(*big.Int) (*L2BridgeMessage, error) L2BridgeMessage(common.Hash) (*L2BridgeMessage, error)
L2BridgeMessageByHash(common.Hash) (*L2BridgeMessage, error) L2BridgeMessageWithFilter(BridgeMessage) (*L2BridgeMessage, error)
LatestL2BridgeMessageNonce() (*big.Int, error)
} }
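The view interface now keys message lookups on the cross-domain message hash instead of the messenger nonce. A minimal caller sketch, assuming a value satisfying the updated `BridgeMessagesView` (the `example` package, function name, and variables below are illustrative, not part of this change):

```go
package example

import (
	"fmt"

	"github.com/ethereum-optimism/optimism/indexer/database"
	"github.com/ethereum/go-ethereum/common"
)

// lookupL1Message sketches the hash-keyed lookup that replaces the old
// nonce-based accessors; names here are illustrative only.
func lookupL1Message(db database.BridgeMessagesView, msgHash common.Hash) error {
	msg, err := db.L1BridgeMessage(msgHash) // lookup by primary key (message hash)
	if err != nil {
		return err
	}
	if msg == nil {
		return fmt.Errorf("no L1 bridge message for hash %s", msgHash) // not-found is a nil record, not an error
	}
	// Equivalent lookup through the generic filter form.
	_, err = db.L1BridgeMessageWithFilter(database.BridgeMessage{MessageHash: msgHash})
	return err
}
```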
type BridgeMessagesDB interface { type BridgeMessagesDB interface {
...@@ -78,22 +75,13 @@ func (db bridgeMessagesDB) StoreL1BridgeMessages(messages []*L1BridgeMessage) er ...@@ -78,22 +75,13 @@ func (db bridgeMessagesDB) StoreL1BridgeMessages(messages []*L1BridgeMessage) er
return result.Error return result.Error
} }
func (db bridgeMessagesDB) L1BridgeMessage(nonce *big.Int) (*L1BridgeMessage, error) { func (db bridgeMessagesDB) L1BridgeMessage(msgHash common.Hash) (*L1BridgeMessage, error) {
var sentMessage L1BridgeMessage return db.L1BridgeMessageWithFilter(BridgeMessage{MessageHash: msgHash})
result := db.gorm.Where(&BridgeMessage{Nonce: U256{Int: nonce}}).Take(&sentMessage)
if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, result.Error
}
return &sentMessage, nil
} }
func (db bridgeMessagesDB) L1BridgeMessageByHash(messageHash common.Hash) (*L1BridgeMessage, error) { func (db bridgeMessagesDB) L1BridgeMessageWithFilter(filter BridgeMessage) (*L1BridgeMessage, error) {
var sentMessage L1BridgeMessage var sentMessage L1BridgeMessage
result := db.gorm.Where(&BridgeMessage{MessageHash: messageHash}).Take(&sentMessage) result := db.gorm.Where(&filter).Take(&sentMessage)
if result.Error != nil { if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) { if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil return nil, nil
...@@ -104,25 +92,8 @@ func (db bridgeMessagesDB) L1BridgeMessageByHash(messageHash common.Hash) (*L1Br ...@@ -104,25 +92,8 @@ func (db bridgeMessagesDB) L1BridgeMessageByHash(messageHash common.Hash) (*L1Br
return &sentMessage, nil return &sentMessage, nil
} }
func (db bridgeMessagesDB) LatestL1BridgeMessageNonce() (*big.Int, error) {
var sentMessage L1BridgeMessage
result := db.gorm.Order("nonce DESC").Take(&sentMessage)
if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, result.Error
}
return sentMessage.Nonce.Int, nil
}
/**
* Arbitrary Messages Sent from L2
*/
func (db bridgeMessagesDB) MarkRelayedL1BridgeMessage(messageHash common.Hash, relayEvent uuid.UUID) error { func (db bridgeMessagesDB) MarkRelayedL1BridgeMessage(messageHash common.Hash, relayEvent uuid.UUID) error {
message, err := db.L1BridgeMessageByHash(messageHash) message, err := db.L1BridgeMessage(messageHash)
if err != nil { if err != nil {
return err return err
} else if message == nil { } else if message == nil {
...@@ -134,27 +105,22 @@ func (db bridgeMessagesDB) MarkRelayedL1BridgeMessage(messageHash common.Hash, r ...@@ -134,27 +105,22 @@ func (db bridgeMessagesDB) MarkRelayedL1BridgeMessage(messageHash common.Hash, r
return result.Error return result.Error
} }
/**
* Arbitrary Messages Sent from L2
*/
func (db bridgeMessagesDB) StoreL2BridgeMessages(messages []*L2BridgeMessage) error { func (db bridgeMessagesDB) StoreL2BridgeMessages(messages []*L2BridgeMessage) error {
result := db.gorm.Create(&messages) result := db.gorm.Create(&messages)
return result.Error return result.Error
} }
func (db bridgeMessagesDB) L2BridgeMessage(nonce *big.Int) (*L2BridgeMessage, error) { func (db bridgeMessagesDB) L2BridgeMessage(msgHash common.Hash) (*L2BridgeMessage, error) {
var sentMessage L2BridgeMessage return db.L2BridgeMessageWithFilter(BridgeMessage{MessageHash: msgHash})
result := db.gorm.Where(&BridgeMessage{Nonce: U256{Int: nonce}}).Take(&sentMessage)
if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, result.Error
}
return &sentMessage, nil
} }
func (db bridgeMessagesDB) L2BridgeMessageByHash(messageHash common.Hash) (*L2BridgeMessage, error) { func (db bridgeMessagesDB) L2BridgeMessageWithFilter(filter BridgeMessage) (*L2BridgeMessage, error) {
var sentMessage L2BridgeMessage var sentMessage L2BridgeMessage
result := db.gorm.Where(&BridgeMessage{MessageHash: messageHash}).Take(&sentMessage) result := db.gorm.Where(&filter).Take(&sentMessage)
if result.Error != nil { if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) { if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil return nil, nil
...@@ -165,21 +131,8 @@ func (db bridgeMessagesDB) L2BridgeMessageByHash(messageHash common.Hash) (*L2Br ...@@ -165,21 +131,8 @@ func (db bridgeMessagesDB) L2BridgeMessageByHash(messageHash common.Hash) (*L2Br
return &sentMessage, nil return &sentMessage, nil
} }
func (db bridgeMessagesDB) LatestL2BridgeMessageNonce() (*big.Int, error) {
var sentMessage L2BridgeMessage
result := db.gorm.Order("nonce DESC").Take(&sentMessage)
if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, result.Error
}
return sentMessage.Nonce.Int, nil
}
func (db bridgeMessagesDB) MarkRelayedL2BridgeMessage(messageHash common.Hash, relayEvent uuid.UUID) error { func (db bridgeMessagesDB) MarkRelayedL2BridgeMessage(messageHash common.Hash, relayEvent uuid.UUID) error {
message, err := db.L2BridgeMessageByHash(messageHash) message, err := db.L2BridgeMessage(messageHash)
if err != nil { if err != nil {
return err return err
} else if message == nil { } else if message == nil {
......
...@@ -24,21 +24,16 @@ type Transaction struct { ...@@ -24,21 +24,16 @@ type Transaction struct {
} }
type L1TransactionDeposit struct { type L1TransactionDeposit struct {
SourceHash common.Hash `gorm:"serializer:json;primaryKey"` SourceHash common.Hash `gorm:"serializer:json;primaryKey"`
L2TransactionHash common.Hash `gorm:"serializer:json"` L2TransactionHash common.Hash `gorm:"serializer:json"`
InitiatedL1EventGUID uuid.UUID InitiatedL1EventGUID uuid.UUID
Version U256
OpaqueData hexutil.Bytes `gorm:"serializer:json"`
Tx Transaction `gorm:"embedded"` Tx Transaction `gorm:"embedded"`
GasLimit U256 GasLimit U256
} }
type L2TransactionWithdrawal struct { type L2TransactionWithdrawal struct {
WithdrawalHash common.Hash `gorm:"serializer:json;primaryKey"` WithdrawalHash common.Hash `gorm:"serializer:json;primaryKey"`
Nonce U256 Nonce U256
InitiatedL2EventGUID uuid.UUID InitiatedL2EventGUID uuid.UUID
......
...@@ -2,31 +2,39 @@ package database ...@@ -2,31 +2,39 @@ package database
import ( import (
"errors" "errors"
"math/big"
"gorm.io/gorm" "gorm.io/gorm"
"github.com/ethereum-optimism/optimism/op-bindings/predeploys"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
) )
var (
ETHTokenPair = TokenPair{LocalTokenAddress: predeploys.LegacyERC20ETHAddr, RemoteTokenAddress: predeploys.LegacyERC20ETHAddr}
)
/** /**
* Types * Types
*/ */
type TokenPair struct { type TokenPair struct {
L1TokenAddress common.Address `gorm:"serializer:json"` LocalTokenAddress common.Address `gorm:"serializer:json"`
L2TokenAddress common.Address `gorm:"serializer:json"` RemoteTokenAddress common.Address `gorm:"serializer:json"`
} }
type L1BridgeDeposit struct { type BridgeTransfer struct {
TransactionSourceHash common.Hash `gorm:"primaryKey;serializer:json"` CrossDomainMessageHash *common.Hash `gorm:"serializer:json"`
CrossDomainMessengerNonce *U256
Tx Transaction `gorm:"embedded"` Tx Transaction `gorm:"embedded"`
TokenPair TokenPair `gorm:"embedded"` TokenPair TokenPair `gorm:"embedded"`
} }
type L1BridgeDeposit struct {
BridgeTransfer `gorm:"embedded"`
TransactionSourceHash common.Hash `gorm:"primaryKey;serializer:json"`
}
type L1BridgeDepositWithTransactionHashes struct { type L1BridgeDepositWithTransactionHashes struct {
L1BridgeDeposit L1BridgeDeposit `gorm:"embedded"` L1BridgeDeposit L1BridgeDeposit `gorm:"embedded"`
...@@ -35,12 +43,9 @@ type L1BridgeDepositWithTransactionHashes struct { ...@@ -35,12 +43,9 @@ type L1BridgeDepositWithTransactionHashes struct {
} }
type L2BridgeWithdrawal struct { type L2BridgeWithdrawal struct {
TransactionWithdrawalHash common.Hash `gorm:"primaryKey;serializer:json"` BridgeTransfer `gorm:"embedded"`
CrossDomainMessengerNonce *U256
Tx Transaction `gorm:"embedded"` TransactionWithdrawalHash common.Hash `gorm:"primaryKey;serializer:json"`
TokenPair TokenPair `gorm:"embedded"`
} }
type L2BridgeWithdrawalWithTransactionHashes struct { type L2BridgeWithdrawalWithTransactionHashes struct {
...@@ -53,11 +58,11 @@ type L2BridgeWithdrawalWithTransactionHashes struct { ...@@ -53,11 +58,11 @@ type L2BridgeWithdrawalWithTransactionHashes struct {
type BridgeTransfersView interface { type BridgeTransfersView interface {
L1BridgeDeposit(common.Hash) (*L1BridgeDeposit, error) L1BridgeDeposit(common.Hash) (*L1BridgeDeposit, error)
L1BridgeDepositByCrossDomainMessengerNonce(*big.Int) (*L1BridgeDeposit, error) L1BridgeDepositWithFilter(BridgeTransfer) (*L1BridgeDeposit, error)
L1BridgeDepositsByAddress(common.Address) ([]*L1BridgeDepositWithTransactionHashes, error) L1BridgeDepositsByAddress(common.Address) ([]*L1BridgeDepositWithTransactionHashes, error)
L2BridgeWithdrawal(common.Hash) (*L2BridgeWithdrawal, error) L2BridgeWithdrawal(common.Hash) (*L2BridgeWithdrawal, error)
L2BridgeWithdrawalByCrossDomainMessengerNonce(*big.Int) (*L2BridgeWithdrawal, error) L2BridgeWithdrawalWithFilter(BridgeTransfer) (*L2BridgeWithdrawal, error)
L2BridgeWithdrawalsByAddress(common.Address) ([]*L2BridgeWithdrawalWithTransactionHashes, error) L2BridgeWithdrawalsByAddress(common.Address) ([]*L2BridgeWithdrawalWithTransactionHashes, error)
} }
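Similarly, the transfers view swaps the nonce accessors for filter-based lookups that take a partially populated `BridgeTransfer`. A minimal sketch of finding a deposit by its cross-domain message hash, assuming a `BridgeTransfersView` implementation is at hand (the helper name and packaging below are illustrative):

```go
package example

import (
	"github.com/ethereum-optimism/optimism/indexer/database"
	"github.com/ethereum/go-ethereum/common"
)

// depositForMessage sketches a filter-based lookup; only the fields of
// interest are set on the filter struct, and gorm's struct-based Where
// ignores zero-value fields.
func depositForMessage(db database.BridgeTransfersView, msgHash common.Hash) (*database.L1BridgeDeposit, error) {
	filter := database.BridgeTransfer{CrossDomainMessageHash: &msgHash}
	return db.L1BridgeDepositWithFilter(filter)
}
```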
...@@ -104,9 +109,9 @@ func (db *bridgeTransfersDB) L1BridgeDeposit(txSourceHash common.Hash) (*L1Bridg ...@@ -104,9 +109,9 @@ func (db *bridgeTransfersDB) L1BridgeDeposit(txSourceHash common.Hash) (*L1Bridg
// L1BridgeDepositByCrossDomainMessengerNonce retrieves tokens deposited, specified by the associated `L1CrossDomainMessenger` nonce. // L1BridgeDepositByCrossDomainMessengerNonce retrieves tokens deposited, specified by the associated `L1CrossDomainMessenger` nonce.
// All tokens bridged via the StandardBridge flows through the L1CrossDomainMessenger // All tokens bridged via the StandardBridge flows through the L1CrossDomainMessenger
func (db *bridgeTransfersDB) L1BridgeDepositByCrossDomainMessengerNonce(nonce *big.Int) (*L1BridgeDeposit, error) { func (db *bridgeTransfersDB) L1BridgeDepositWithFilter(filter BridgeTransfer) (*L1BridgeDeposit, error) {
var deposit L1BridgeDeposit var deposit L1BridgeDeposit
result := db.gorm.Where(&L1BridgeDeposit{CrossDomainMessengerNonce: &U256{Int: nonce}}).Take(&deposit) result := db.gorm.Where(&filter).Take(&deposit)
if result.Error != nil { if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) { if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil return nil, nil
...@@ -125,6 +130,7 @@ l1_bridge_deposits.*, ...@@ -125,6 +130,7 @@ l1_bridge_deposits.*,
l1_contract_events.transaction_hash AS l1_transaction_hash, l1_contract_events.transaction_hash AS l1_transaction_hash,
l1_transaction_deposits.l2_transaction_hash`) l1_transaction_deposits.l2_transaction_hash`)
// TODO join with l1_tokens and l2_tokens
depositsQuery = depositsQuery.Joins("INNER JOIN l1_transaction_deposits ON l1_bridge_deposits.transaction_source_hash = l1_transaction_deposits.source_hash") depositsQuery = depositsQuery.Joins("INNER JOIN l1_transaction_deposits ON l1_bridge_deposits.transaction_source_hash = l1_transaction_deposits.source_hash")
depositsQuery = depositsQuery.Joins("INNER JOIN l1_contract_events ON l1_transaction_deposits.initiated_l1_event_guid = l1_contract_events.guid") depositsQuery = depositsQuery.Joins("INNER JOIN l1_contract_events ON l1_transaction_deposits.initiated_l1_event_guid = l1_contract_events.guid")
...@@ -167,9 +173,9 @@ func (db *bridgeTransfersDB) L2BridgeWithdrawal(txWithdrawalHash common.Hash) (* ...@@ -167,9 +173,9 @@ func (db *bridgeTransfersDB) L2BridgeWithdrawal(txWithdrawalHash common.Hash) (*
// L2BridgeWithdrawalByCrossDomainMessengerNonce retrieves tokens withdrawn, specified by the associated `L2CrossDomainMessenger` nonce. // L2BridgeWithdrawalByCrossDomainMessengerNonce retrieves tokens withdrawn, specified by the associated `L2CrossDomainMessenger` nonce.
// All tokens bridged via the StandardBridge flows through the L2CrossDomainMessenger // All tokens bridged via the StandardBridge flows through the L2CrossDomainMessenger
func (db *bridgeTransfersDB) L2BridgeWithdrawalByCrossDomainMessengerNonce(nonce *big.Int) (*L2BridgeWithdrawal, error) { func (db *bridgeTransfersDB) L2BridgeWithdrawalWithFilter(filter BridgeTransfer) (*L2BridgeWithdrawal, error) {
var withdrawal L2BridgeWithdrawal var withdrawal L2BridgeWithdrawal
result := db.gorm.Where(&L2BridgeWithdrawal{CrossDomainMessengerNonce: &U256{Int: nonce}}).Take(&withdrawal) result := db.gorm.Where(filter).Take(&withdrawal)
if result.Error != nil { if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) { if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, nil return nil, nil
......
...@@ -101,7 +101,7 @@ services: ...@@ -101,7 +101,7 @@ services:
gateway-frontend: gateway-frontend:
command: pnpm nx start @gateway/frontend --host 0.0.0.0 --port 5173 command: pnpm nx start @gateway/frontend --host 0.0.0.0 --port 5173
# Change tag to `latest` after https://github.com/ethereum-optimism/gateway/pull/2541 merges # Change tag to `latest` after https://github.com/ethereum-optimism/gateway/pull/2541 merges
image: ethereumoptimism/gateway-frontend:0687c408e4f85cbe81acf7a197e861df8c7282df image: ethereumoptimism/gateway-frontend:latest
ports: ports:
- 5173:5173 - 5173:5173
healthcheck: healthcheck:
......
...@@ -57,12 +57,12 @@ func TestE2EBridgeL1CrossDomainMessenger(t *testing.T) { ...@@ -57,12 +57,12 @@ func TestE2EBridgeL1CrossDomainMessenger(t *testing.T) {
nonceBytes := [31]byte{0: byte(1)} nonceBytes := [31]byte{0: byte(1)}
nonce := new(big.Int).SetBytes(nonceBytes[:]) nonce := new(big.Int).SetBytes(nonceBytes[:])
sentMessage, err := testSuite.DB.BridgeMessages.L1BridgeMessage(nonce) sentMessage, err := testSuite.DB.BridgeMessages.L1BridgeMessage(parsedMessage.MessageHash)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, sentMessage) require.NotNil(t, sentMessage)
require.NotNil(t, sentMessage.SentMessageEventGUID) require.NotNil(t, sentMessage.SentMessageEventGUID)
require.Equal(t, depositInfo.DepositTx.SourceHash, sentMessage.TransactionSourceHash) require.Equal(t, depositInfo.DepositTx.SourceHash, sentMessage.TransactionSourceHash)
require.Equal(t, parsedMessage.MessageHash, sentMessage.MessageHash) require.Equal(t, nonce.Uint64(), sentMessage.Nonce.Int.Uint64())
require.Equal(t, uint64(100_000), sentMessage.GasLimit.Int.Uint64()) require.Equal(t, uint64(100_000), sentMessage.GasLimit.Int.Uint64())
require.Equal(t, big.NewInt(params.Ether), sentMessage.Tx.Amount.Int) require.Equal(t, big.NewInt(params.Ether), sentMessage.Tx.Amount.Int)
require.Equal(t, aliceAddr, sentMessage.Tx.FromAddress) require.Equal(t, aliceAddr, sentMessage.Tx.FromAddress)
...@@ -83,7 +83,7 @@ func TestE2EBridgeL1CrossDomainMessenger(t *testing.T) { ...@@ -83,7 +83,7 @@ func TestE2EBridgeL1CrossDomainMessenger(t *testing.T) {
return l2Header != nil && l2Header.Number.Uint64() >= depositReceipt.BlockNumber.Uint64(), nil return l2Header != nil && l2Header.Number.Uint64() >= depositReceipt.BlockNumber.Uint64(), nil
})) }))
sentMessage, err = testSuite.DB.BridgeMessages.L1BridgeMessage(nonce) sentMessage, err = testSuite.DB.BridgeMessages.L1BridgeMessage(parsedMessage.MessageHash)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, sentMessage) require.NotNil(t, sentMessage)
require.NotNil(t, sentMessage.RelayedMessageEventGUID) require.NotNil(t, sentMessage.RelayedMessageEventGUID)
...@@ -143,12 +143,12 @@ func TestE2EBridgeL2CrossDomainMessenger(t *testing.T) { ...@@ -143,12 +143,12 @@ func TestE2EBridgeL2CrossDomainMessenger(t *testing.T) {
nonceBytes := [31]byte{0: byte(1)} nonceBytes := [31]byte{0: byte(1)}
nonce := new(big.Int).SetBytes(nonceBytes[:]) nonce := new(big.Int).SetBytes(nonceBytes[:])
sentMessage, err := testSuite.DB.BridgeMessages.L2BridgeMessage(nonce) sentMessage, err := testSuite.DB.BridgeMessages.L2BridgeMessage(parsedMessage.MessageHash)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, sentMessage) require.NotNil(t, sentMessage)
require.NotNil(t, sentMessage.SentMessageEventGUID) require.NotNil(t, sentMessage.SentMessageEventGUID)
require.Equal(t, withdrawalHash, sentMessage.TransactionWithdrawalHash) require.Equal(t, withdrawalHash, sentMessage.TransactionWithdrawalHash)
require.Equal(t, parsedMessage.MessageHash, sentMessage.MessageHash) require.Equal(t, nonce.Uint64(), sentMessage.Nonce.Int.Uint64())
require.Equal(t, uint64(100_000), sentMessage.GasLimit.Int.Uint64()) require.Equal(t, uint64(100_000), sentMessage.GasLimit.Int.Uint64())
require.Equal(t, big.NewInt(params.Ether), sentMessage.Tx.Amount.Int) require.Equal(t, big.NewInt(params.Ether), sentMessage.Tx.Amount.Int)
require.Equal(t, aliceAddr, sentMessage.Tx.FromAddress) require.Equal(t, aliceAddr, sentMessage.Tx.FromAddress)
...@@ -166,7 +166,7 @@ func TestE2EBridgeL2CrossDomainMessenger(t *testing.T) { ...@@ -166,7 +166,7 @@ func TestE2EBridgeL2CrossDomainMessenger(t *testing.T) {
})) }))
// message is marked as relayed // message is marked as relayed
sentMessage, err = testSuite.DB.BridgeMessages.L2BridgeMessage(nonce) sentMessage, err = testSuite.DB.BridgeMessages.L2BridgeMessage(parsedMessage.MessageHash)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, sentMessage) require.NotNil(t, sentMessage)
require.NotNil(t, sentMessage.RelayedMessageEventGUID) require.NotNil(t, sentMessage.RelayedMessageEventGUID)
......
...@@ -59,9 +59,6 @@ func TestE2EBridgeTransactionsOptimismPortalDeposits(t *testing.T) { ...@@ -59,9 +59,6 @@ func TestE2EBridgeTransactionsOptimismPortalDeposits(t *testing.T) {
require.Equal(t, aliceAddr, deposit.Tx.ToAddress) require.Equal(t, aliceAddr, deposit.Tx.ToAddress)
require.ElementsMatch(t, calldata, deposit.Tx.Data) require.ElementsMatch(t, calldata, deposit.Tx.Data)
require.Equal(t, depositInfo.Version.Uint64(), deposit.Version.Int.Uint64())
require.ElementsMatch(t, depositInfo.OpaqueData, deposit.OpaqueData)
event, err := testSuite.DB.ContractEvents.L1ContractEvent(deposit.InitiatedL1EventGUID) event, err := testSuite.DB.ContractEvents.L1ContractEvent(deposit.InitiatedL1EventGUID)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, event) require.NotNil(t, event)
......
...@@ -7,7 +7,6 @@ import ( ...@@ -7,7 +7,6 @@ import (
"time" "time"
e2etest_utils "github.com/ethereum-optimism/optimism/indexer/e2e_tests/utils" e2etest_utils "github.com/ethereum-optimism/optimism/indexer/e2e_tests/utils"
"github.com/ethereum-optimism/optimism/indexer/processor"
op_e2e "github.com/ethereum-optimism/optimism/op-e2e" op_e2e "github.com/ethereum-optimism/optimism/op-e2e"
"github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait" "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait"
"github.com/ethereum-optimism/optimism/op-node/withdrawals" "github.com/ethereum-optimism/optimism/op-node/withdrawals"
...@@ -56,8 +55,8 @@ func TestE2EBridgeTransfersStandardBridgeETHDeposit(t *testing.T) { ...@@ -56,8 +55,8 @@ func TestE2EBridgeTransfersStandardBridgeETHDeposit(t *testing.T) {
deposit := aliceDeposits[0].L1BridgeDeposit deposit := aliceDeposits[0].L1BridgeDeposit
require.Equal(t, depositInfo.DepositTx.SourceHash, deposit.TransactionSourceHash) require.Equal(t, depositInfo.DepositTx.SourceHash, deposit.TransactionSourceHash)
require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.L1TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.LocalTokenAddress)
require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.L2TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.RemoteTokenAddress)
require.Equal(t, big.NewInt(params.Ether), deposit.Tx.Amount.Int) require.Equal(t, big.NewInt(params.Ether), deposit.Tx.Amount.Int)
require.Equal(t, aliceAddr, deposit.Tx.FromAddress) require.Equal(t, aliceAddr, deposit.Tx.FromAddress)
require.Equal(t, aliceAddr, deposit.Tx.ToAddress) require.Equal(t, aliceAddr, deposit.Tx.ToAddress)
...@@ -65,9 +64,7 @@ func TestE2EBridgeTransfersStandardBridgeETHDeposit(t *testing.T) { ...@@ -65,9 +64,7 @@ func TestE2EBridgeTransfersStandardBridgeETHDeposit(t *testing.T) {
// StandardBridge flows through the messenger. We remove the first two significant // StandardBridge flows through the messenger. We remove the first two significant
// bytes of the nonce dedicated to the version. nonce == 0 (first message) // bytes of the nonce dedicated to the version. nonce == 0 (first message)
require.NotNil(t, deposit.CrossDomainMessengerNonce) require.NotNil(t, deposit.CrossDomainMessageHash)
_, nonce := processor.DecodeVersionedNonce(deposit.CrossDomainMessengerNonce.Int)
require.Zero(t, nonce.Uint64())
// (2) Test Deposit Finalization via CrossDomainMessenger relayed message // (2) Test Deposit Finalization via CrossDomainMessenger relayed message
depositReceipt, err = wait.ForReceiptOK(context.Background(), testSuite.L2Client, types.NewTx(depositInfo.DepositTx).Hash()) depositReceipt, err = wait.ForReceiptOK(context.Background(), testSuite.L2Client, types.NewTx(depositInfo.DepositTx).Hash())
...@@ -77,7 +74,7 @@ func TestE2EBridgeTransfersStandardBridgeETHDeposit(t *testing.T) { ...@@ -77,7 +74,7 @@ func TestE2EBridgeTransfersStandardBridgeETHDeposit(t *testing.T) {
return l2Header != nil && l2Header.Number.Uint64() >= depositReceipt.BlockNumber.Uint64(), nil return l2Header != nil && l2Header.Number.Uint64() >= depositReceipt.BlockNumber.Uint64(), nil
})) }))
crossDomainBridgeMessage, err := testSuite.DB.BridgeMessages.L1BridgeMessage(deposit.CrossDomainMessengerNonce.Int) crossDomainBridgeMessage, err := testSuite.DB.BridgeMessages.L1BridgeMessage(*deposit.CrossDomainMessageHash)
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, crossDomainBridgeMessage) require.NotNil(t, crossDomainBridgeMessage)
require.NotNil(t, crossDomainBridgeMessage.RelayedMessageEventGUID) require.NotNil(t, crossDomainBridgeMessage.RelayedMessageEventGUID)
...@@ -117,18 +114,28 @@ func TestE2EBridgeTransfersOptimismPortalETHReceive(t *testing.T) { ...@@ -117,18 +114,28 @@ func TestE2EBridgeTransfersOptimismPortalETHReceive(t *testing.T) {
deposit := aliceDeposits[0].L1BridgeDeposit deposit := aliceDeposits[0].L1BridgeDeposit
require.Equal(t, depositInfo.DepositTx.SourceHash, deposit.TransactionSourceHash) require.Equal(t, depositInfo.DepositTx.SourceHash, deposit.TransactionSourceHash)
require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.L1TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.LocalTokenAddress)
require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.L2TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, deposit.TokenPair.RemoteTokenAddress)
require.Equal(t, big.NewInt(params.Ether), deposit.Tx.Amount.Int) require.Equal(t, big.NewInt(params.Ether), deposit.Tx.Amount.Int)
require.Equal(t, aliceAddr, deposit.Tx.FromAddress) require.Equal(t, aliceAddr, deposit.Tx.FromAddress)
require.Equal(t, aliceAddr, deposit.Tx.ToAddress) require.Equal(t, aliceAddr, deposit.Tx.ToAddress)
require.Len(t, deposit.Tx.Data, 0) require.Len(t, deposit.Tx.Data, 0)
// deposit was not sent through the cross domain messenger // deposit was not sent through the cross domain messenger
require.Nil(t, deposit.CrossDomainMessengerNonce) require.Nil(t, deposit.CrossDomainMessageHash)
// (2) Test Deposit Finalization // (2) Test Deposit Finalization
// Nothing to do as we rely on the derivation process to include the deposit depositReceipt, err := wait.ForReceiptOK(context.Background(), testSuite.L2Client, types.NewTx(depositInfo.DepositTx).Hash())
require.NoError(t, err)
require.NoError(t, wait.For(context.Background(), 500*time.Millisecond, func() (bool, error) {
l2Header := testSuite.Indexer.L2Processor.LatestProcessedHeader()
return l2Header != nil && l2Header.Number.Uint64() >= depositReceipt.BlockNumber.Uint64(), nil
}))
// Still nil as the withdrawal did not occur through the standard bridge
aliceDeposits, err = testSuite.DB.BridgeTransfers.L1BridgeDepositsByAddress(aliceAddr)
require.NoError(t, err)
require.Nil(t, aliceDeposits[0].L1BridgeDeposit.CrossDomainMessageHash)
} }
func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) { func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) {
...@@ -178,8 +185,8 @@ func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) { ...@@ -178,8 +185,8 @@ func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) {
withdrawal := aliceWithdrawals[0].L2BridgeWithdrawal withdrawal := aliceWithdrawals[0].L2BridgeWithdrawal
require.Equal(t, withdrawalHash, withdrawal.TransactionWithdrawalHash) require.Equal(t, withdrawalHash, withdrawal.TransactionWithdrawalHash)
require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.L1TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.LocalTokenAddress)
require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.L2TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.RemoteTokenAddress)
require.Equal(t, big.NewInt(params.Ether), withdrawal.Tx.Amount.Int) require.Equal(t, big.NewInt(params.Ether), withdrawal.Tx.Amount.Int)
require.Equal(t, aliceAddr, withdrawal.Tx.FromAddress) require.Equal(t, aliceAddr, withdrawal.Tx.FromAddress)
require.Equal(t, aliceAddr, withdrawal.Tx.ToAddress) require.Equal(t, aliceAddr, withdrawal.Tx.ToAddress)
...@@ -187,9 +194,11 @@ func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) { ...@@ -187,9 +194,11 @@ func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) {
// StandardBridge flows through the messenger. We remove the first two // StandardBridge flows through the messenger. We remove the first two
// bytes of the nonce dedicated to the version. nonce == 0 (first message) // bytes of the nonce dedicated to the version. nonce == 0 (first message)
require.NotNil(t, withdrawal.CrossDomainMessengerNonce) require.NotNil(t, withdrawal.CrossDomainMessageHash)
_, nonce := processor.DecodeVersionedNonce(withdrawal.CrossDomainMessengerNonce.Int)
require.Zero(t, nonce.Uint64()) crossDomainBridgeMessage, err := testSuite.DB.BridgeMessages.L2BridgeMessage(*withdrawal.CrossDomainMessageHash)
require.NoError(t, err)
require.Nil(t, crossDomainBridgeMessage.RelayedMessageEventGUID)
// (2) Test Withdrawal Proven/Finalized. Test the sql join queries to populate the right transaction // (2) Test Withdrawal Proven/Finalized. Test the sql join queries to populate the right transaction
require.Empty(t, aliceWithdrawals[0].ProvenL1TransactionHash) require.Empty(t, aliceWithdrawals[0].ProvenL1TransactionHash)
...@@ -206,6 +215,11 @@ func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) { ...@@ -206,6 +215,11 @@ func TestE2EBridgeTransfersStandardBridgeETHWithdrawal(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, proveReceipt.TxHash, aliceWithdrawals[0].ProvenL1TransactionHash) require.Equal(t, proveReceipt.TxHash, aliceWithdrawals[0].ProvenL1TransactionHash)
require.Equal(t, finalizeReceipt.TxHash, aliceWithdrawals[0].FinalizedL1TransactionHash) require.Equal(t, finalizeReceipt.TxHash, aliceWithdrawals[0].FinalizedL1TransactionHash)
crossDomainBridgeMessage, err = testSuite.DB.BridgeMessages.L2BridgeMessage(*withdrawal.CrossDomainMessageHash)
require.NoError(t, err)
require.NotNil(t, crossDomainBridgeMessage)
require.NotNil(t, crossDomainBridgeMessage.RelayedMessageEventGUID)
} }
func TestE2EBridgeTransfersL2ToL1MessagePasserReceive(t *testing.T) { func TestE2EBridgeTransfersL2ToL1MessagePasserReceive(t *testing.T) {
...@@ -254,15 +268,15 @@ func TestE2EBridgeTransfersL2ToL1MessagePasserReceive(t *testing.T) { ...@@ -254,15 +268,15 @@ func TestE2EBridgeTransfersL2ToL1MessagePasserReceive(t *testing.T) {
withdrawal := aliceWithdrawals[0].L2BridgeWithdrawal withdrawal := aliceWithdrawals[0].L2BridgeWithdrawal
require.Equal(t, withdrawalHash, withdrawal.TransactionWithdrawalHash) require.Equal(t, withdrawalHash, withdrawal.TransactionWithdrawalHash)
require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.L1TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.LocalTokenAddress)
require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.L2TokenAddress) require.Equal(t, predeploys.LegacyERC20ETHAddr, withdrawal.TokenPair.RemoteTokenAddress)
require.Equal(t, big.NewInt(params.Ether), withdrawal.Tx.Amount.Int) require.Equal(t, big.NewInt(params.Ether), withdrawal.Tx.Amount.Int)
require.Equal(t, aliceAddr, withdrawal.Tx.FromAddress) require.Equal(t, aliceAddr, withdrawal.Tx.FromAddress)
require.Equal(t, aliceAddr, withdrawal.Tx.ToAddress) require.Equal(t, aliceAddr, withdrawal.Tx.ToAddress)
require.Len(t, withdrawal.Tx.Data, 0) require.Len(t, withdrawal.Tx.Data, 0)
// withdrawal was not sent through the cross domain messenger // withdrawal was not sent through the cross domain messenger
require.Nil(t, withdrawal.CrossDomainMessengerNonce) require.Nil(t, withdrawal.CrossDomainMessageHash)
// (2) Test Withdrawal Proven/Finalized. Test the sql join queries to populate the right transaction // (2) Test Withdrawal Proven/Finalized. Test the sql join queries to populate the right transaction
require.Empty(t, aliceWithdrawals[0].ProvenL1TransactionHash) require.Empty(t, aliceWithdrawals[0].ProvenL1TransactionHash)
...@@ -279,4 +293,7 @@ func TestE2EBridgeTransfersL2ToL1MessagePasserReceive(t *testing.T) { ...@@ -279,4 +293,7 @@ func TestE2EBridgeTransfersL2ToL1MessagePasserReceive(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, proveReceipt.TxHash, aliceWithdrawals[0].ProvenL1TransactionHash) require.Equal(t, proveReceipt.TxHash, aliceWithdrawals[0].ProvenL1TransactionHash)
require.Equal(t, finalizeReceipt.TxHash, aliceWithdrawals[0].FinalizedL1TransactionHash) require.Equal(t, finalizeReceipt.TxHash, aliceWithdrawals[0].FinalizedL1TransactionHash)
// Still nil as the withdrawal did not occur through the standard bridge
require.Nil(t, aliceWithdrawals[0].L2BridgeWithdrawal.CrossDomainMessageHash)
} }
...@@ -16,7 +16,6 @@ import ( ...@@ -16,7 +16,6 @@ import (
op_e2e "github.com/ethereum-optimism/optimism/op-e2e" op_e2e "github.com/ethereum-optimism/optimism/op-e2e"
"github.com/ethereum-optimism/optimism/op-node/testlog" "github.com/ethereum-optimism/optimism/op-node/testlog"
op_log "github.com/ethereum-optimism/optimism/op-service/log"
"github.com/ethereum/go-ethereum/ethclient" "github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
...@@ -60,9 +59,6 @@ func createE2ETestSuite(t *testing.T) E2ETestSuite { ...@@ -60,9 +59,6 @@ func createE2ETestSuite(t *testing.T) E2ETestSuite {
// Indexer Configuration and Start // Indexer Configuration and Start
indexerCfg := config.Config{ indexerCfg := config.Config{
Logger: op_log.CLIConfig{
Level: "warn",
},
DB: config.DBConfig{ DB: config.DBConfig{
Host: "127.0.0.1", Host: "127.0.0.1",
Port: 5432, Port: 5432,
......
...@@ -22,11 +22,3 @@ port = 8080 ...@@ -22,11 +22,3 @@ port = 8080
host = "127.0.0.1" host = "127.0.0.1"
port = 7300 port = 7300
[logger]
# Log level: trace, debug, info, warn, error, crit. Capitals are accepted too.
level = "info"
# Color the log output. Defaults to true if terminal is detected.
color = true
# Format the log output. Supported formats: 'text', 'terminal', 'logfmt', 'json', 'json-pretty'
format = "terminal"
...@@ -84,6 +84,31 @@ CREATE TABLE IF NOT EXISTS output_proposals ( ...@@ -84,6 +84,31 @@ CREATE TABLE IF NOT EXISTS output_proposals (
* BRIDGING DATA * BRIDGING DATA
*/ */
/**
* TOKEN DATA
*/
-- L1 Token table
CREATE TABLE IF NOT EXISTS l1_tokens (
address VARCHAR PRIMARY KEY,
bridge_address VARCHAR NOT NULL,
l2_token_address VARCHAR NOT NULL,
name VARCHAR NOT NULL,
symbol VARCHAR NOT NULL,
decimals INTEGER NOT NULL CHECK (decimals >= 0 AND decimals <= 18)
);
-- L2 Token table
CREATE TABLE IF NOT EXISTS l2_tokens (
address VARCHAR PRIMARY KEY,
bridge_address VARCHAR NOT NULL,
l1_token_address VARCHAR REFERENCES l1_tokens(address),
name VARCHAR NOT NULL,
symbol VARCHAR NOT NULL,
decimals INTEGER NOT NULL CHECK (decimals >= 0 AND decimals <= 18)
);
-- OptimismPortal/L2ToL1MessagePasser -- OptimismPortal/L2ToL1MessagePasser
CREATE TABLE IF NOT EXISTS l1_transaction_deposits ( CREATE TABLE IF NOT EXISTS l1_transaction_deposits (
source_hash VARCHAR NOT NULL PRIMARY KEY, source_hash VARCHAR NOT NULL PRIMARY KEY,
...@@ -91,10 +116,6 @@ CREATE TABLE IF NOT EXISTS l1_transaction_deposits ( ...@@ -91,10 +116,6 @@ CREATE TABLE IF NOT EXISTS l1_transaction_deposits (
initiated_l1_event_guid VARCHAR NOT NULL REFERENCES l1_contract_events(guid), initiated_l1_event_guid VARCHAR NOT NULL REFERENCES l1_contract_events(guid),
-- OptimismPortal specific
version UINT256 NOT NULL,
opaque_data VARCHAR NOT NULL,
-- transaction data -- transaction data
from_address VARCHAR NOT NULL, from_address VARCHAR NOT NULL,
to_address VARCHAR NOT NULL, to_address VARCHAR NOT NULL,
...@@ -127,9 +148,10 @@ CREATE TABLE IF NOT EXISTS l2_transaction_withdrawals ( ...@@ -127,9 +148,10 @@ CREATE TABLE IF NOT EXISTS l2_transaction_withdrawals (
-- CrossDomainMessenger -- CrossDomainMessenger
CREATE TABLE IF NOT EXISTS l1_bridge_messages( CREATE TABLE IF NOT EXISTS l1_bridge_messages(
nonce UINT256 NOT NULL PRIMARY KEY, message_hash VARCHAR NOT NULL PRIMARY KEY,
message_hash VARCHAR NOT NULL, nonce UINT256 NOT NULL UNIQUE,
transaction_source_hash VARCHAR NOT NULL REFERENCES l1_transaction_deposits(source_hash),
transaction_source_hash VARCHAR NOT NULL UNIQUE REFERENCES l1_transaction_deposits(source_hash),
sent_message_event_guid VARCHAR NOT NULL UNIQUE REFERENCES l1_contract_events(guid), sent_message_event_guid VARCHAR NOT NULL UNIQUE REFERENCES l1_contract_events(guid),
relayed_message_event_guid VARCHAR UNIQUE REFERENCES l2_contract_events(guid), relayed_message_event_guid VARCHAR UNIQUE REFERENCES l2_contract_events(guid),
...@@ -143,9 +165,10 @@ CREATE TABLE IF NOT EXISTS l1_bridge_messages( ...@@ -143,9 +165,10 @@ CREATE TABLE IF NOT EXISTS l1_bridge_messages(
timestamp INTEGER NOT NULL CHECK (timestamp > 0) timestamp INTEGER NOT NULL CHECK (timestamp > 0)
); );
CREATE TABLE IF NOT EXISTS l2_bridge_messages( CREATE TABLE IF NOT EXISTS l2_bridge_messages(
nonce UINT256 NOT NULL PRIMARY KEY, message_hash VARCHAR NOT NULL PRIMARY KEY,
message_hash VARCHAR NOT NULL, nonce UINT256 NOT NULL UNIQUE,
transaction_withdrawal_hash VARCHAR NOT NULL REFERENCES l2_transaction_withdrawals(withdrawal_hash),
transaction_withdrawal_hash VARCHAR NOT NULL UNIQUE REFERENCES l2_transaction_withdrawals(withdrawal_hash),
sent_message_event_guid VARCHAR NOT NULL UNIQUE REFERENCES l2_contract_events(guid), sent_message_event_guid VARCHAR NOT NULL UNIQUE REFERENCES l2_contract_events(guid),
relayed_message_event_guid VARCHAR UNIQUE REFERENCES l1_contract_events(guid), relayed_message_event_guid VARCHAR UNIQUE REFERENCES l1_contract_events(guid),
...@@ -163,15 +186,15 @@ CREATE TABLE IF NOT EXISTS l2_bridge_messages( ...@@ -163,15 +186,15 @@ CREATE TABLE IF NOT EXISTS l2_bridge_messages(
CREATE TABLE IF NOT EXISTS l1_bridge_deposits ( CREATE TABLE IF NOT EXISTS l1_bridge_deposits (
transaction_source_hash VARCHAR PRIMARY KEY REFERENCES l1_transaction_deposits(source_hash), transaction_source_hash VARCHAR PRIMARY KEY REFERENCES l1_transaction_deposits(source_hash),
-- We allow the cross_domain_messenger_nonce to be NULL-able to account -- We allow the cross_domain_message_hash to be NULL-able to account
-- for scenarios where ETH is simply sent to the OptimismPortal contract -- for scenarios where ETH is simply sent to the OptimismPortal contract
cross_domain_messenger_nonce UINT256 UNIQUE REFERENCES l1_bridge_messages(nonce), cross_domain_message_hash VARCHAR UNIQUE REFERENCES l1_bridge_messages(message_hash),
-- Deposit information -- Deposit information
from_address VARCHAR NOT NULL, from_address VARCHAR NOT NULL,
to_address VARCHAR NOT NULL, to_address VARCHAR NOT NULL,
l1_token_address VARCHAR NOT NULL, local_token_address VARCHAR NOT NULL, -- REFERENCES l1_tokens(address), uncomment me in future pr
l2_token_address VARCHAR NOT NULL, remote_token_address VARCHAR NOT NULL, -- REFERENCES l2_tokens(address), uncomment me in future pr
amount UINT256 NOT NULL, amount UINT256 NOT NULL,
data VARCHAR NOT NULL, data VARCHAR NOT NULL,
timestamp INTEGER NOT NULL CHECK (timestamp > 0) timestamp INTEGER NOT NULL CHECK (timestamp > 0)
...@@ -179,15 +202,15 @@ CREATE TABLE IF NOT EXISTS l1_bridge_deposits ( ...@@ -179,15 +202,15 @@ CREATE TABLE IF NOT EXISTS l1_bridge_deposits (
CREATE TABLE IF NOT EXISTS l2_bridge_withdrawals ( CREATE TABLE IF NOT EXISTS l2_bridge_withdrawals (
transaction_withdrawal_hash VARCHAR PRIMARY KEY REFERENCES l2_transaction_withdrawals(withdrawal_hash), transaction_withdrawal_hash VARCHAR PRIMARY KEY REFERENCES l2_transaction_withdrawals(withdrawal_hash),
-- We allow the cross_domain_messenger_nonce to be NULL-able to account for -- We allow the cross_domain_message_hash to be NULL-able to account for
-- scenarios where ETH is simply sent to the L2ToL1MessagePasser contract -- scenarios where ETH is simply sent to the L2ToL1MessagePasser contract
cross_domain_messenger_nonce UINT256 UNIQUE REFERENCES l2_bridge_messages(nonce), cross_domain_message_hash VARCHAR UNIQUE REFERENCES l2_bridge_messages(message_hash),
-- Withdrawal information -- Withdrawal information
from_address VARCHAR NOT NULL, from_address VARCHAR NOT NULL,
to_address VARCHAR NOT NULL, to_address VARCHAR NOT NULL,
l1_token_address VARCHAR NOT NULL, local_token_address VARCHAR NOT NULL, -- REFERENCES l2_tokens(address), uncomment me in future pr
l2_token_address VARCHAR NOT NULL, remote_token_address VARCHAR NOT NULL, -- REFERENCES l1_tokens(address), uncomment me in future pr
amount UINT256 NOT NULL, amount UINT256 NOT NULL,
data VARCHAR NOT NULL, data VARCHAR NOT NULL,
timestamp INTEGER NOT NULL CHECK (timestamp > 0) timestamp INTEGER NOT NULL CHECK (timestamp > 0)
......
...@@ -12,7 +12,6 @@ import ( ...@@ -12,7 +12,6 @@ import (
"github.com/ethereum-optimism/optimism/indexer/node" "github.com/ethereum-optimism/optimism/indexer/node"
"github.com/ethereum-optimism/optimism/op-bindings/bindings" "github.com/ethereum-optimism/optimism/op-bindings/bindings"
legacy_bindings "github.com/ethereum-optimism/optimism/op-bindings/legacy-bindings" legacy_bindings "github.com/ethereum-optimism/optimism/op-bindings/legacy-bindings"
"github.com/ethereum-optimism/optimism/op-bindings/predeploys"
"github.com/ethereum-optimism/optimism/op-node/rollup/derive" "github.com/ethereum-optimism/optimism/op-node/rollup/derive"
"github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum"
...@@ -222,7 +221,7 @@ func l1ProcessFn(processLog log.Logger, ethClient node.EthClient, l1Contracts co ...@@ -222,7 +221,7 @@ func l1ProcessFn(processLog log.Logger, ethClient node.EthClient, l1Contracts co
} }
// forward along contract events to standard bridge processor // forward along contract events to standard bridge processor
err = l1ProcessContractEventsStandardBridge(processLog, db, ethClient, processedContractEvents) err = l1ProcessContractEventsStandardBridge(processLog, db, processedContractEvents)
if err != nil { if err != nil {
return err return err
} }
...@@ -250,8 +249,6 @@ func l1ProcessContractEventsBridgeTransactions(processLog log.Logger, db *databa ...@@ -250,8 +249,6 @@ func l1ProcessContractEventsBridgeTransactions(processLog log.Logger, db *databa
SourceHash: depositTx.SourceHash, SourceHash: depositTx.SourceHash,
L2TransactionHash: types.NewTx(depositTx).Hash(), L2TransactionHash: types.NewTx(depositTx).Hash(),
InitiatedL1EventGUID: depositEvent.Event.GUID, InitiatedL1EventGUID: depositEvent.Event.GUID,
Version: database.U256{Int: depositEvent.Version},
OpaqueData: depositEvent.OpaqueData,
GasLimit: database.U256{Int: new(big.Int).SetUint64(depositTx.Gas)}, GasLimit: database.U256{Int: new(big.Int).SetUint64(depositTx.Gas)},
Tx: database.Transaction{ Tx: database.Transaction{
FromAddress: depositTx.From, FromAddress: depositTx.From,
...@@ -266,11 +263,10 @@ func l1ProcessContractEventsBridgeTransactions(processLog log.Logger, db *databa ...@@ -266,11 +263,10 @@ func l1ProcessContractEventsBridgeTransactions(processLog log.Logger, db *databa
if len(depositTx.Data) == 0 && depositTx.Value.BitLen() > 0 { if len(depositTx.Data) == 0 && depositTx.Value.BitLen() > 0 {
ethDeposits = append(ethDeposits, &database.L1BridgeDeposit{ ethDeposits = append(ethDeposits, &database.L1BridgeDeposit{
TransactionSourceHash: depositTx.SourceHash, TransactionSourceHash: depositTx.SourceHash,
Tx: transactionDeposits[i].Tx, BridgeTransfer: database.BridgeTransfer{
TokenPair: database.TokenPair{ Tx: transactionDeposits[i].Tx,
// TODO index eth token if it doesn't exist // TODO index eth token if it doesn't exist
L1TokenAddress: predeploys.LegacyERC20ETHAddr, TokenPair: database.ETHTokenPair,
L2TokenAddress: predeploys.LegacyERC20ETHAddr,
}, },
}) })
} }
...@@ -386,8 +382,8 @@ func l1ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db ...@@ -386,8 +382,8 @@ func l1ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db
sentMessages[i] = &database.L1BridgeMessage{ sentMessages[i] = &database.L1BridgeMessage{
TransactionSourceHash: depositTx.SourceHash, TransactionSourceHash: depositTx.SourceHash,
BridgeMessage: database.BridgeMessage{ BridgeMessage: database.BridgeMessage{
Nonce: database.U256{Int: sentMessageEvent.MessageNonce},
MessageHash: sentMessageEvent.MessageHash, MessageHash: sentMessageEvent.MessageHash,
Nonce: database.U256{Int: sentMessageEvent.MessageNonce},
SentMessageEventGUID: sentMessageEvent.Event.GUID, SentMessageEventGUID: sentMessageEvent.Event.GUID,
GasLimit: database.U256{Int: sentMessageEvent.GasLimit}, GasLimit: database.U256{Int: sentMessageEvent.GasLimit},
Tx: database.Transaction{ Tx: database.Transaction{
...@@ -419,7 +415,7 @@ func l1ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db ...@@ -419,7 +415,7 @@ func l1ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db
} }
for _, relayedMessage := range relayedMessageEvents { for _, relayedMessage := range relayedMessageEvents {
message, err := db.BridgeMessages.L2BridgeMessageByHash(relayedMessage.MsgHash) message, err := db.BridgeMessages.L2BridgeMessage(relayedMessage.MsgHash)
if err != nil { if err != nil {
return err return err
} else if message == nil { } else if message == nil {
...@@ -443,9 +439,7 @@ func l1ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db ...@@ -443,9 +439,7 @@ func l1ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db
return nil return nil
} }
func l1ProcessContractEventsStandardBridge(processLog log.Logger, db *database.DB, ethClient node.EthClient, events *ProcessedContractEvents) error { func l1ProcessContractEventsStandardBridge(processLog log.Logger, db *database.DB, events *ProcessedContractEvents) error {
rawEthClient := ethclient.NewClient(ethClient.RawRpcClient())
// (1) Process New Deposits // (1) Process New Deposits
initiatedDepositEvents, err := StandardBridgeInitiatedEvents(events) initiatedDepositEvents, err := StandardBridgeInitiatedEvents(events)
if err != nil { if err != nil {
...@@ -465,16 +459,18 @@ func l1ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D ...@@ -465,16 +459,18 @@ func l1ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D
} }
deposits[i] = &database.L1BridgeDeposit{ deposits[i] = &database.L1BridgeDeposit{
TransactionSourceHash: depositTx.SourceHash, TransactionSourceHash: depositTx.SourceHash,
CrossDomainMessengerNonce: &database.U256{Int: initiatedBridgeEvent.CrossDomainMessengerNonce}, BridgeTransfer: database.BridgeTransfer{
// TODO index the tokens pairs if they don't exist CrossDomainMessageHash: &initiatedBridgeEvent.CrossDomainMessageHash,
TokenPair: database.TokenPair{L1TokenAddress: initiatedBridgeEvent.LocalToken, L2TokenAddress: initiatedBridgeEvent.RemoteToken}, // TODO index the tokens pairs if they don't exist
Tx: database.Transaction{ TokenPair: database.TokenPair{LocalTokenAddress: initiatedBridgeEvent.LocalToken, RemoteTokenAddress: initiatedBridgeEvent.RemoteToken},
FromAddress: initiatedBridgeEvent.From, Tx: database.Transaction{
ToAddress: initiatedBridgeEvent.To, FromAddress: initiatedBridgeEvent.From,
Amount: database.U256{Int: initiatedBridgeEvent.Amount}, ToAddress: initiatedBridgeEvent.To,
Data: initiatedBridgeEvent.ExtraData, Amount: database.U256{Int: initiatedBridgeEvent.Amount},
Timestamp: initiatedBridgeEvent.Event.Timestamp, Data: initiatedBridgeEvent.ExtraData,
Timestamp: initiatedBridgeEvent.Event.Timestamp,
},
}, },
} }
} }
...@@ -491,26 +487,25 @@ func l1ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D ...@@ -491,26 +487,25 @@ func l1ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D
// - We don't need to do anything actionable on the database here as this is layered on top of the // - We don't need to do anything actionable on the database here as this is layered on top of the
// bridge transaction & messages that have a tracked lifecycle. We simply walk through and ensure // bridge transaction & messages that have a tracked lifecycle. We simply walk through and ensure
// that the corresponding initiated withdrawals exist and match as an integrity check // that the corresponding initiated withdrawals exist and match as an integrity check
finalizedWithdrawalEvents, err := StandardBridgeFinalizedEvents(rawEthClient, events) finalizedWithdrawalEvents, err := StandardBridgeFinalizedEvents(events)
if err != nil { if err != nil {
return err return err
} }
for _, finalizedWithdrawalEvent := range finalizedWithdrawalEvents { for _, finalizedWithdrawalEvent := range finalizedWithdrawalEvents {
withdrawal, err := db.BridgeTransfers.L2BridgeWithdrawalByCrossDomainMessengerNonce(finalizedWithdrawalEvent.CrossDomainMessengerNonce) withdrawal, err := db.BridgeTransfers.L2BridgeWithdrawalWithFilter(database.BridgeTransfer{CrossDomainMessageHash: &finalizedWithdrawalEvent.CrossDomainMessageHash})
if err != nil { if err != nil {
return err return err
} else if withdrawal == nil { } else if withdrawal == nil {
processLog.Error("missing indexed L2StandardBridge withdrawal for finalization", "cross_domain_messenger_nonce", finalizedWithdrawalEvent.CrossDomainMessengerNonce) processLog.Error("missing indexed L2StandardBridge withdrawal for finalization", "cross_domain_message_hash", finalizedWithdrawalEvent.CrossDomainMessageHash)
return errors.New("missing indexed L2StandardBridge withdrawal for finalization event") return errors.New("missing indexed L2StandardBridge withdrawal for finalization event")
} }
// sanity check on the bridge fields // sanity check on the bridge fields
if finalizedWithdrawalEvent.From != withdrawal.Tx.FromAddress || finalizedWithdrawalEvent.To != withdrawal.Tx.ToAddress || if finalizedWithdrawalEvent.From != withdrawal.Tx.FromAddress || finalizedWithdrawalEvent.To != withdrawal.Tx.ToAddress ||
finalizedWithdrawalEvent.Amount.Cmp(withdrawal.Tx.Amount.Int) != 0 || !bytes.Equal(finalizedWithdrawalEvent.ExtraData, withdrawal.Tx.Data) || finalizedWithdrawalEvent.Amount.Cmp(withdrawal.Tx.Amount.Int) != 0 || !bytes.Equal(finalizedWithdrawalEvent.ExtraData, withdrawal.Tx.Data) ||
finalizedWithdrawalEvent.LocalToken != withdrawal.TokenPair.L1TokenAddress || finalizedWithdrawalEvent.RemoteToken != withdrawal.TokenPair.L2TokenAddress { finalizedWithdrawalEvent.LocalToken != withdrawal.TokenPair.LocalTokenAddress || finalizedWithdrawalEvent.RemoteToken != withdrawal.TokenPair.RemoteTokenAddress {
processLog.Crit("bridge finalization fields mismatch with initiated fields!", "tx_withdrawal_hash", withdrawal.TransactionWithdrawalHash, "cross_domain_messenger_nonce", withdrawal.CrossDomainMessengerNonce.Int) processLog.Crit("bridge finalization fields mismatch with initiated fields!", "tx_withdrawal_hash", withdrawal.TransactionWithdrawalHash, "cross_domain_message_hash", withdrawal.CrossDomainMessageHash)
return errors.New("bridge tx mismatch!")
} }
} }
......
...@@ -10,7 +10,6 @@ import ( ...@@ -10,7 +10,6 @@ import (
"github.com/ethereum-optimism/optimism/indexer/database" "github.com/ethereum-optimism/optimism/indexer/database"
"github.com/ethereum-optimism/optimism/indexer/node" "github.com/ethereum-optimism/optimism/indexer/node"
"github.com/ethereum-optimism/optimism/op-bindings/bindings" "github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum-optimism/optimism/op-bindings/predeploys"
"github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
...@@ -160,7 +159,7 @@ func l2ProcessFn(processLog log.Logger, ethClient node.EthClient, l2Contracts L2 ...@@ -160,7 +159,7 @@ func l2ProcessFn(processLog log.Logger, ethClient node.EthClient, l2Contracts L2
} }
// forward along contract events to standard bridge processor // forward along contract events to standard bridge processor
err = l2ProcessContractEventsStandardBridge(processLog, db, ethClient, processedContractEvents) err = l2ProcessContractEventsStandardBridge(processLog, db, processedContractEvents)
if err != nil { if err != nil {
return err return err
} }
...@@ -198,10 +197,9 @@ func l2ProcessContractEventsBridgeTransactions(processLog log.Logger, db *databa ...@@ -198,10 +197,9 @@ func l2ProcessContractEventsBridgeTransactions(processLog log.Logger, db *databa
if len(withdrawalEvent.Data) == 0 && withdrawalEvent.Value.BitLen() > 0 { if len(withdrawalEvent.Data) == 0 && withdrawalEvent.Value.BitLen() > 0 {
ethWithdrawals = append(ethWithdrawals, &database.L2BridgeWithdrawal{ ethWithdrawals = append(ethWithdrawals, &database.L2BridgeWithdrawal{
TransactionWithdrawalHash: withdrawalEvent.WithdrawalHash, TransactionWithdrawalHash: withdrawalEvent.WithdrawalHash,
Tx: transactionWithdrawals[i].Tx, BridgeTransfer: database.BridgeTransfer{
TokenPair: database.TokenPair{ Tx: transactionWithdrawals[i].Tx,
L1TokenAddress: predeploys.LegacyERC20ETHAddr, TokenPair: database.ETHTokenPair,
L2TokenAddress: predeploys.LegacyERC20ETHAddr,
}, },
}) })
} }
...@@ -258,8 +256,8 @@ func l2ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db ...@@ -258,8 +256,8 @@ func l2ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db
sentMessages[i] = &database.L2BridgeMessage{ sentMessages[i] = &database.L2BridgeMessage{
TransactionWithdrawalHash: msgPassedEvent.WithdrawalHash, TransactionWithdrawalHash: msgPassedEvent.WithdrawalHash,
BridgeMessage: database.BridgeMessage{ BridgeMessage: database.BridgeMessage{
Nonce: database.U256{Int: sentMessageEvent.MessageNonce},
MessageHash: sentMessageEvent.MessageHash, MessageHash: sentMessageEvent.MessageHash,
Nonce: database.U256{Int: sentMessageEvent.MessageNonce},
SentMessageEventGUID: sentMessageEvent.Event.GUID, SentMessageEventGUID: sentMessageEvent.Event.GUID,
GasLimit: database.U256{Int: sentMessageEvent.GasLimit}, GasLimit: database.U256{Int: sentMessageEvent.GasLimit},
Tx: database.Transaction{ Tx: database.Transaction{
...@@ -298,7 +296,7 @@ func l2ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db ...@@ -298,7 +296,7 @@ func l2ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db
} }
for _, relayedMessage := range relayedMessageEvents { for _, relayedMessage := range relayedMessageEvents {
message, err := db.BridgeMessages.L1BridgeMessageByHash(relayedMessage.MsgHash) message, err := db.BridgeMessages.L1BridgeMessage(relayedMessage.MsgHash)
if err != nil { if err != nil {
return err return err
} }
...@@ -330,9 +328,7 @@ func l2ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db ...@@ -330,9 +328,7 @@ func l2ProcessContractEventsBridgeCrossDomainMessages(processLog log.Logger, db
return nil return nil
} }
func l2ProcessContractEventsStandardBridge(processLog log.Logger, db *database.DB, ethClient node.EthClient, events *ProcessedContractEvents) error { func l2ProcessContractEventsStandardBridge(processLog log.Logger, db *database.DB, events *ProcessedContractEvents) error {
rawEthClient := ethclient.NewClient(ethClient.RawRpcClient())
l2ToL1MessagePasserABI, err := bindings.NewL2ToL1MessagePasser(common.Address{}, nil) l2ToL1MessagePasserABI, err := bindings.NewL2ToL1MessagePasser(common.Address{}, nil)
if err != nil { if err != nil {
return err return err
...@@ -357,14 +353,16 @@ func l2ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D ...@@ -357,14 +353,16 @@ func l2ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D
withdrawals[i] = &database.L2BridgeWithdrawal{ withdrawals[i] = &database.L2BridgeWithdrawal{
TransactionWithdrawalHash: msgPassedEvent.WithdrawalHash, TransactionWithdrawalHash: msgPassedEvent.WithdrawalHash,
CrossDomainMessengerNonce: &database.U256{Int: initiatedBridgeEvent.CrossDomainMessengerNonce}, BridgeTransfer: database.BridgeTransfer{
TokenPair: database.TokenPair{L1TokenAddress: initiatedBridgeEvent.LocalToken, L2TokenAddress: initiatedBridgeEvent.RemoteToken}, CrossDomainMessageHash: &initiatedBridgeEvent.CrossDomainMessageHash,
Tx: database.Transaction{ TokenPair: database.TokenPair{LocalTokenAddress: initiatedBridgeEvent.LocalToken, RemoteTokenAddress: initiatedBridgeEvent.RemoteToken},
FromAddress: initiatedBridgeEvent.From, Tx: database.Transaction{
ToAddress: initiatedBridgeEvent.To, FromAddress: initiatedBridgeEvent.From,
Amount: database.U256{Int: initiatedBridgeEvent.Amount}, ToAddress: initiatedBridgeEvent.To,
Data: initiatedBridgeEvent.ExtraData, Amount: database.U256{Int: initiatedBridgeEvent.Amount},
Timestamp: initiatedBridgeEvent.Event.Timestamp, Data: initiatedBridgeEvent.ExtraData,
Timestamp: initiatedBridgeEvent.Event.Timestamp,
},
}, },
} }
} }
...@@ -382,26 +380,26 @@ func l2ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D ...@@ -382,26 +380,26 @@ func l2ProcessContractEventsStandardBridge(processLog log.Logger, db *database.D
// bridge transaction & messages that have a tracked lifecycle. We simply walk through and ensure // bridge transaction & messages that have a tracked lifecycle. We simply walk through and ensure
// that the corresponding initiated deposits exist as an integrity check // that the corresponding initiated deposits exist as an integrity check
finalizedDepositEvents, err := StandardBridgeFinalizedEvents(rawEthClient, events) finalizedDepositEvents, err := StandardBridgeFinalizedEvents(events)
if err != nil { if err != nil {
return err return err
} }
for _, finalizedDepositEvent := range finalizedDepositEvents { for _, finalizedDepositEvent := range finalizedDepositEvents {
deposit, err := db.BridgeTransfers.L1BridgeDepositByCrossDomainMessengerNonce(finalizedDepositEvent.CrossDomainMessengerNonce) deposit, err := db.BridgeTransfers.L1BridgeDepositWithFilter(database.BridgeTransfer{CrossDomainMessageHash: &finalizedDepositEvent.CrossDomainMessageHash})
if err != nil { if err != nil {
return err return err
} else if deposit == nil { } else if deposit == nil {
// Indexed CrossDomainMessenger messages ensure we're in a caught up state here // Indexed CrossDomainMessenger messages ensure we're in a caught up state here
processLog.Error("missing indexed L1StandardBridge deposit on finalization", "cross_domain_messenger_nonce", finalizedDepositEvent.CrossDomainMessengerNonce) processLog.Error("missing indexed L1StandardBridge deposit on finalization", "cross_domain_massage_hash", finalizedDepositEvent.CrossDomainMessageHash)
return errors.New("missing indexed L1StandardBridge deposit on finalization") return errors.New("missing indexed L1StandardBridge deposit on finalization")
} }
// sanity check on the bridge fields // sanity check on the bridge fields
if finalizedDepositEvent.From != deposit.Tx.FromAddress || finalizedDepositEvent.To != deposit.Tx.ToAddress || if finalizedDepositEvent.From != deposit.Tx.FromAddress || finalizedDepositEvent.To != deposit.Tx.ToAddress ||
finalizedDepositEvent.Amount.Cmp(deposit.Tx.Amount.Int) != 0 || !bytes.Equal(finalizedDepositEvent.ExtraData, deposit.Tx.Data) || finalizedDepositEvent.Amount.Cmp(deposit.Tx.Amount.Int) != 0 || !bytes.Equal(finalizedDepositEvent.ExtraData, deposit.Tx.Data) ||
finalizedDepositEvent.LocalToken != deposit.TokenPair.L1TokenAddress || finalizedDepositEvent.RemoteToken != deposit.TokenPair.L2TokenAddress { finalizedDepositEvent.LocalToken != deposit.TokenPair.LocalTokenAddress || finalizedDepositEvent.RemoteToken != deposit.TokenPair.RemoteTokenAddress {
processLog.Error("bridge finalization fields mismatch with initiated fields!", "tx_source_hash", deposit.TransactionSourceHash, "cross_domain_messenger_nonce", deposit.CrossDomainMessengerNonce.Int) processLog.Error("bridge finalization fields mismatch with initiated fields!", "tx_source_hash", deposit.TransactionSourceHash, "cross_domain_message_hash", deposit.CrossDomainMessageHash)
return errors.New("bridge tx mismatch") return errors.New("bridge tx mismatch")
} }
} }
......
...@@ -11,7 +11,9 @@ import ( ...@@ -11,7 +11,9 @@ import (
"github.com/ethereum-optimism/optimism/op-bindings/bindings" "github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum-optimism/optimism/op-chain-ops/deployer" "github.com/ethereum-optimism/optimism/op-chain-ops/deployer"
"github.com/ethereum-optimism/optimism/op-chain-ops/genesis" "github.com/ethereum-optimism/optimism/op-chain-ops/genesis"
"github.com/ethereum-optimism/optimism/op-challenger/config"
"github.com/ethereum-optimism/optimism/op-challenger/fault/alphabet" "github.com/ethereum-optimism/optimism/op-challenger/fault/alphabet"
"github.com/ethereum-optimism/optimism/op-e2e/e2eutils/challenger"
"github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait" "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait"
"github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
...@@ -52,6 +54,7 @@ type FactoryHelper struct { ...@@ -52,6 +54,7 @@ type FactoryHelper struct {
require *require.Assertions require *require.Assertions
client *ethclient.Client client *ethclient.Client
opts *bind.TransactOpts opts *bind.TransactOpts
factoryAddr common.Address
factory *bindings.DisputeGameFactory factory *bindings.DisputeGameFactory
blockOracle *bindings.BlockOracle blockOracle *bindings.BlockOracle
l2oo *bindings.L2OutputOracleCaller l2oo *bindings.L2OutputOracleCaller
...@@ -65,7 +68,8 @@ func NewFactoryHelper(t *testing.T, ctx context.Context, deployments *genesis.L1 ...@@ -65,7 +68,8 @@ func NewFactoryHelper(t *testing.T, ctx context.Context, deployments *genesis.L1
require.NoError(err) require.NoError(err)
require.NotNil(deployments, "No deployments") require.NotNil(deployments, "No deployments")
factory, err := bindings.NewDisputeGameFactory(deployments.DisputeGameFactoryProxy, client) factoryAddr := deployments.DisputeGameFactoryProxy
factory, err := bindings.NewDisputeGameFactory(factoryAddr, client)
require.NoError(err) require.NoError(err)
blockOracle, err := bindings.NewBlockOracle(deployments.BlockOracle, client) blockOracle, err := bindings.NewBlockOracle(deployments.BlockOracle, client)
require.NoError(err) require.NoError(err)
...@@ -78,6 +82,7 @@ func NewFactoryHelper(t *testing.T, ctx context.Context, deployments *genesis.L1 ...@@ -78,6 +82,7 @@ func NewFactoryHelper(t *testing.T, ctx context.Context, deployments *genesis.L1
client: client, client: client,
opts: opts, opts: opts,
factory: factory, factory: factory,
factoryAddr: factoryAddr,
blockOracle: blockOracle, blockOracle: blockOracle,
l2oo: l2oo, l2oo: l2oo,
} }
...@@ -150,6 +155,21 @@ func (h *FactoryHelper) StartCannonGame(ctx context.Context, rootClaim common.Ha ...@@ -150,6 +155,21 @@ func (h *FactoryHelper) StartCannonGame(ctx context.Context, rootClaim common.Ha
}, },
} }
} }
func (h *FactoryHelper) StartChallenger(ctx context.Context, l1Endpoint string, name string, options ...challenger.Option) *challenger.Helper {
opts := []challenger.Option{
func(c *config.Config) {
// Uncomment when challenger actually supports setting the game factory address
//c.FactoryAddress = h.factoryAddr
c.TraceType = config.TraceTypeAlphabet
},
}
opts = append(opts, options...)
c := challenger.NewChallenger(h.t, ctx, l1Endpoint, name, opts...)
h.t.Cleanup(func() {
_ = c.Close()
})
return c
}
// waitForProposals waits until there are at least two proposals in the output oracle // waitForProposals waits until there are at least two proposals in the output oracle
// This is the minimum required for creating a game. // This is the minimum required for creating a game.
......
...@@ -13,6 +13,37 @@ import ( ...@@ -13,6 +13,37 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestCannonMultipleGames(t *testing.T) {
t.Skip("Challenger doesn't yet support multiple games")
InitParallel(t)
ctx := context.Background()
sys, l1Client := startFaultDisputeSystem(t)
t.Cleanup(sys.Close)
gameFactory := disputegame.NewFactoryHelper(t, ctx, sys.cfg.L1Deployments, l1Client)
// Start a challenger with the correct alphabet trace
gameFactory.StartChallenger(ctx, sys.NodeEndpoint("l1"), "TowerDefense", func(c *config.Config) {
c.AgreeWithProposedOutput = true
c.AlphabetTrace = "abcdefg"
c.TxMgrConfig.PrivateKey = e2eutils.EncodePrivKeyToString(sys.cfg.Secrets.Alice)
})
game1 := gameFactory.StartAlphabetGame(ctx, "abcxyz")
// Wait for the challenger to respond to the first game
game1.WaitForClaimCount(ctx, 2)
game2 := gameFactory.StartAlphabetGame(ctx, "zyxabc")
// Wait for the challenger to respond to the second game
game2.WaitForClaimCount(ctx, 2)
// Challenger should respond to new claims
game2.Attack(ctx, 1, common.Hash{0xaa})
game2.WaitForClaimCount(ctx, 4)
game1.Defend(ctx, 1, common.Hash{0xaa})
game1.WaitForClaimCount(ctx, 4)
}
func TestResolveDisputeGame(t *testing.T) { func TestResolveDisputeGame(t *testing.T) {
InitParallel(t) InitParallel(t)
......
...@@ -66,6 +66,9 @@ type Metricer interface { ...@@ -66,6 +66,9 @@ type Metricer interface {
RecordSequencerSealingTime(duration time.Duration) RecordSequencerSealingTime(duration time.Duration)
Document() []metrics.DocumentedMetric Document() []metrics.DocumentedMetric
RecordChannelInputBytes(num int) RecordChannelInputBytes(num int)
RecordHeadChannelOpened()
RecordChannelTimedOut()
RecordFrame()
// P2P Metrics // P2P Metrics
SetPeerScores(allScores []store.PeerScores) SetPeerScores(allScores []store.PeerScores)
ClientPayloadByNumberEvent(num uint64, resultCode byte, duration time.Duration) ClientPayloadByNumberEvent(num uint64, resultCode byte, duration time.Duration)
...@@ -132,6 +135,11 @@ type Metrics struct { ...@@ -132,6 +135,11 @@ type Metrics struct {
TransactionsSequencedTotal prometheus.Counter TransactionsSequencedTotal prometheus.Counter
// Channel Bank Metrics
headChannelOpenedEvent *EventMetrics
channelTimedOutEvent *EventMetrics
frameAddedEvent *EventMetrics
// P2P Metrics // P2P Metrics
PeerCount prometheus.Gauge PeerCount prometheus.Gauge
StreamCount prometheus.Gauge StreamCount prometheus.Gauge
...@@ -363,6 +371,10 @@ func NewMetrics(procName string) *Metrics { ...@@ -363,6 +371,10 @@ func NewMetrics(procName string) *Metrics {
Help: "Count of incoming dial attempts to accept, with label to filter to allowed attempts", Help: "Count of incoming dial attempts to accept, with label to filter to allowed attempts",
}, []string{"allow"}), }, []string{"allow"}),
headChannelOpenedEvent: NewEventMetrics(factory, ns, "head_channel", "New channel at the front of the channel bank"),
channelTimedOutEvent: NewEventMetrics(factory, ns, "channel_timeout", "Channel has timed out"),
frameAddedEvent: NewEventMetrics(factory, ns, "frame_added", "New frame ingested in the channel bank"),
ChannelInputBytes: factory.NewCounter(prometheus.CounterOpts{ ChannelInputBytes: factory.NewCounter(prometheus.CounterOpts{
Namespace: ns, Namespace: ns,
Name: "channel_input_bytes", Name: "channel_input_bytes",
...@@ -700,6 +712,18 @@ func (m *Metrics) RecordChannelInputBytes(inputCompressedBytes int) { ...@@ -700,6 +712,18 @@ func (m *Metrics) RecordChannelInputBytes(inputCompressedBytes int) {
m.ChannelInputBytes.Add(float64(inputCompressedBytes)) m.ChannelInputBytes.Add(float64(inputCompressedBytes))
} }
func (m *Metrics) RecordHeadChannelOpened() {
m.headChannelOpenedEvent.RecordEvent()
}
func (m *Metrics) RecordChannelTimedOut() {
m.channelTimedOutEvent.RecordEvent()
}
func (m *Metrics) RecordFrame() {
m.frameAddedEvent.RecordEvent()
}
func (m *Metrics) RecordPeerUnban() { func (m *Metrics) RecordPeerUnban() {
m.PeerUnbans.Inc() m.PeerUnbans.Inc()
} }
...@@ -830,6 +854,15 @@ func (n *noopMetricer) PayloadsQuarantineSize(int) { ...@@ -830,6 +854,15 @@ func (n *noopMetricer) PayloadsQuarantineSize(int) {
func (n *noopMetricer) RecordChannelInputBytes(int) { func (n *noopMetricer) RecordChannelInputBytes(int) {
} }
func (n *noopMetricer) RecordHeadChannelOpened() {
}
func (n *noopMetricer) RecordChannelTimedOut() {
}
func (n *noopMetricer) RecordFrame() {
}
func (n *noopMetricer) RecordPeerUnban() { func (n *noopMetricer) RecordPeerUnban() {
} }
......
...@@ -29,8 +29,9 @@ type NextFrameProvider interface { ...@@ -29,8 +29,9 @@ type NextFrameProvider interface {
// ChannelBank buffers channel frames, and emits full channel data // ChannelBank buffers channel frames, and emits full channel data
type ChannelBank struct { type ChannelBank struct {
log log.Logger log log.Logger
cfg *rollup.Config cfg *rollup.Config
metrics Metrics
channels map[ChannelID]*Channel // channels by ID channels map[ChannelID]*Channel // channels by ID
channelQueue []ChannelID // channels in FIFO order channelQueue []ChannelID // channels in FIFO order
...@@ -42,10 +43,11 @@ type ChannelBank struct { ...@@ -42,10 +43,11 @@ type ChannelBank struct {
var _ ResettableStage = (*ChannelBank)(nil) var _ ResettableStage = (*ChannelBank)(nil)
// NewChannelBank creates a ChannelBank, which should be Reset(origin) before use. // NewChannelBank creates a ChannelBank, which should be Reset(origin) before use.
func NewChannelBank(log log.Logger, cfg *rollup.Config, prev NextFrameProvider, fetcher L1Fetcher) *ChannelBank { func NewChannelBank(log log.Logger, cfg *rollup.Config, prev NextFrameProvider, fetcher L1Fetcher, m Metrics) *ChannelBank {
return &ChannelBank{ return &ChannelBank{
log: log, log: log,
cfg: cfg, cfg: cfg,
metrics: m,
channels: make(map[ChannelID]*Channel), channels: make(map[ChannelID]*Channel),
channelQueue: make([]ChannelID, 0, 10), channelQueue: make([]ChannelID, 0, 10),
prev: prev, prev: prev,
...@@ -83,6 +85,10 @@ func (cb *ChannelBank) IngestFrame(f Frame) { ...@@ -83,6 +85,10 @@ func (cb *ChannelBank) IngestFrame(f Frame) {
currentCh, ok := cb.channels[f.ID] currentCh, ok := cb.channels[f.ID]
if !ok { if !ok {
// Only record a head channel if it can immediately be active.
if len(cb.channelQueue) == 0 {
cb.metrics.RecordHeadChannelOpened()
}
// create new channel if it doesn't exist yet // create new channel if it doesn't exist yet
currentCh = NewChannel(f.ID, origin) currentCh = NewChannel(f.ID, origin)
cb.channels[f.ID] = currentCh cb.channels[f.ID] = currentCh
...@@ -101,6 +107,7 @@ func (cb *ChannelBank) IngestFrame(f Frame) { ...@@ -101,6 +107,7 @@ func (cb *ChannelBank) IngestFrame(f Frame) {
log.Warn("failed to ingest frame into channel", "err", err) log.Warn("failed to ingest frame into channel", "err", err)
return return
} }
cb.metrics.RecordFrame()
// Prune after the frame is loaded. // Prune after the frame is loaded.
cb.prune() cb.prune()
...@@ -117,8 +124,13 @@ func (cb *ChannelBank) Read() (data []byte, err error) { ...@@ -117,8 +124,13 @@ func (cb *ChannelBank) Read() (data []byte, err error) {
timedOut := ch.OpenBlockNumber()+cb.cfg.ChannelTimeout < cb.Origin().Number timedOut := ch.OpenBlockNumber()+cb.cfg.ChannelTimeout < cb.Origin().Number
if timedOut { if timedOut {
cb.log.Info("channel timed out", "channel", first, "frames", len(ch.inputs)) cb.log.Info("channel timed out", "channel", first, "frames", len(ch.inputs))
cb.metrics.RecordChannelTimedOut()
delete(cb.channels, first) delete(cb.channels, first)
cb.channelQueue = cb.channelQueue[1:] cb.channelQueue = cb.channelQueue[1:]
// There is a new head channel if there is a channel after we have removed the first channel
if len(cb.channelQueue) > 0 {
cb.metrics.RecordHeadChannelOpened()
}
return nil, nil // multiple different channels may all be timed out return nil, nil // multiple different channels may all be timed out
} }
if !ch.IsReady() { if !ch.IsReady() {
...@@ -128,6 +140,10 @@ func (cb *ChannelBank) Read() (data []byte, err error) { ...@@ -128,6 +140,10 @@ func (cb *ChannelBank) Read() (data []byte, err error) {
delete(cb.channels, first) delete(cb.channels, first)
cb.channelQueue = cb.channelQueue[1:] cb.channelQueue = cb.channelQueue[1:]
// There is a new head channel if there is a channel after we have removed the first channel
if len(cb.channelQueue) > 0 {
cb.metrics.RecordHeadChannelOpened()
}
r := ch.Reader() r := ch.Reader()
// Suppress error here. io.ReadAll does return nil instead of io.EOF though. // Suppress error here. io.ReadAll does return nil instead of io.EOF though.
data, _ = io.ReadAll(r) data, _ = io.ReadAll(r)
......
...@@ -8,6 +8,7 @@ import ( ...@@ -8,6 +8,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/ethereum-optimism/optimism/op-node/metrics"
"github.com/ethereum-optimism/optimism/op-node/rollup" "github.com/ethereum-optimism/optimism/op-node/rollup"
"github.com/ethereum-optimism/optimism/op-node/testlog" "github.com/ethereum-optimism/optimism/op-node/testlog"
"github.com/ethereum-optimism/optimism/op-node/testutils" "github.com/ethereum-optimism/optimism/op-node/testutils"
...@@ -101,7 +102,7 @@ func TestChannelBankSimple(t *testing.T) { ...@@ -101,7 +102,7 @@ func TestChannelBankSimple(t *testing.T) {
cfg := &rollup.Config{ChannelTimeout: 10} cfg := &rollup.Config{ChannelTimeout: 10}
cb := NewChannelBank(testlog.Logger(t, log.LvlCrit), cfg, input, nil) cb := NewChannelBank(testlog.Logger(t, log.LvlCrit), cfg, input, nil, metrics.NoopMetrics)
// Load the first frame // Load the first frame
out, err := cb.NextData(context.Background()) out, err := cb.NextData(context.Background())
...@@ -145,7 +146,7 @@ func TestChannelBankInterleaved(t *testing.T) { ...@@ -145,7 +146,7 @@ func TestChannelBankInterleaved(t *testing.T) {
cfg := &rollup.Config{ChannelTimeout: 10} cfg := &rollup.Config{ChannelTimeout: 10}
cb := NewChannelBank(testlog.Logger(t, log.LvlCrit), cfg, input, nil) cb := NewChannelBank(testlog.Logger(t, log.LvlCrit), cfg, input, nil, metrics.NoopMetrics)
// Load a:0 // Load a:0
out, err := cb.NextData(context.Background()) out, err := cb.NextData(context.Background())
...@@ -205,7 +206,7 @@ func TestChannelBankDuplicates(t *testing.T) { ...@@ -205,7 +206,7 @@ func TestChannelBankDuplicates(t *testing.T) {
cfg := &rollup.Config{ChannelTimeout: 10} cfg := &rollup.Config{ChannelTimeout: 10}
cb := NewChannelBank(testlog.Logger(t, log.LvlCrit), cfg, input, nil) cb := NewChannelBank(testlog.Logger(t, log.LvlCrit), cfg, input, nil, metrics.NoopMetrics)
// Load the first frame // Load the first frame
out, err := cb.NextData(context.Background()) out, err := cb.NextData(context.Background())
......
...@@ -18,6 +18,9 @@ type Metrics interface { ...@@ -18,6 +18,9 @@ type Metrics interface {
RecordL2Ref(name string, ref eth.L2BlockRef) RecordL2Ref(name string, ref eth.L2BlockRef)
RecordUnsafePayloadsBuffer(length uint64, memSize uint64, next eth.BlockID) RecordUnsafePayloadsBuffer(length uint64, memSize uint64, next eth.BlockID)
RecordChannelInputBytes(inputCompressedBytes int) RecordChannelInputBytes(inputCompressedBytes int)
RecordHeadChannelOpened()
RecordChannelTimedOut()
RecordFrame()
} }
type L1Fetcher interface { type L1Fetcher interface {
...@@ -85,7 +88,7 @@ func NewDerivationPipeline(log log.Logger, cfg *rollup.Config, l1Fetcher L1Fetch ...@@ -85,7 +88,7 @@ func NewDerivationPipeline(log log.Logger, cfg *rollup.Config, l1Fetcher L1Fetch
dataSrc := NewDataSourceFactory(log, cfg, l1Fetcher) // auxiliary stage for L1Retrieval dataSrc := NewDataSourceFactory(log, cfg, l1Fetcher) // auxiliary stage for L1Retrieval
l1Src := NewL1Retrieval(log, dataSrc, l1Traversal) l1Src := NewL1Retrieval(log, dataSrc, l1Traversal)
frameQueue := NewFrameQueue(log, l1Src) frameQueue := NewFrameQueue(log, l1Src)
bank := NewChannelBank(log, cfg, frameQueue, l1Fetcher) bank := NewChannelBank(log, cfg, frameQueue, l1Fetcher, metrics)
chInReader := NewChannelInReader(log, bank, metrics) chInReader := NewChannelInReader(log, bank, metrics)
batchQueue := NewBatchQueue(log, cfg, chInReader) batchQueue := NewBatchQueue(log, cfg, chInReader)
attrBuilder := NewFetchingAttributesBuilder(cfg, l1Fetcher, engine) attrBuilder := NewFetchingAttributesBuilder(cfg, l1Fetcher, engine)
......
...@@ -23,6 +23,9 @@ type Metrics interface { ...@@ -23,6 +23,9 @@ type Metrics interface {
RecordL1Ref(name string, ref eth.L1BlockRef) RecordL1Ref(name string, ref eth.L1BlockRef)
RecordL2Ref(name string, ref eth.L2BlockRef) RecordL2Ref(name string, ref eth.L2BlockRef)
RecordChannelInputBytes(inputCompressedBytes int) RecordChannelInputBytes(inputCompressedBytes int)
RecordHeadChannelOpened()
RecordChannelTimedOut()
RecordFrame()
RecordUnsafePayloadsBuffer(length uint64, memSize uint64, next eth.BlockID) RecordUnsafePayloadsBuffer(length uint64, memSize uint64, next eth.BlockID)
......
...@@ -44,6 +44,15 @@ func (t *TestDerivationMetrics) RecordChannelInputBytes(inputCompressedBytes int ...@@ -44,6 +44,15 @@ func (t *TestDerivationMetrics) RecordChannelInputBytes(inputCompressedBytes int
} }
} }
func (t *TestDerivationMetrics) RecordHeadChannelOpened() {
}
func (t *TestDerivationMetrics) RecordChannelTimedOut() {
}
func (t *TestDerivationMetrics) RecordFrame() {
}
type TestRPCMetrics struct{} type TestRPCMetrics struct{}
func (n *TestRPCMetrics) RecordRPCServerRequest(method string) func() { func (n *TestRPCMetrics) RecordRPCServerRequest(method string) func() {
......
...@@ -17,6 +17,10 @@ func (e *ErrFailedPermanently) Error() string { ...@@ -17,6 +17,10 @@ func (e *ErrFailedPermanently) Error() string {
return fmt.Sprintf("operation failed permanently after %d attempts: %v", e.attempts, e.LastErr) return fmt.Sprintf("operation failed permanently after %d attempts: %v", e.attempts, e.LastErr)
} }
func (e *ErrFailedPermanently) Unwrap() error {
return e.LastErr
}
type pair[T, U any] struct { type pair[T, U any] struct {
a T a T
b U b U
...@@ -35,37 +39,27 @@ func Do2[T, U any](ctx context.Context, maxAttempts int, strategy Strategy, op f ...@@ -35,37 +39,27 @@ func Do2[T, U any](ctx context.Context, maxAttempts int, strategy Strategy, op f
// with delays in between each retry according to the provided // with delays in between each retry according to the provided
// Strategy. // Strategy.
func Do[T any](ctx context.Context, maxAttempts int, strategy Strategy, op func() (T, error)) (T, error) { func Do[T any](ctx context.Context, maxAttempts int, strategy Strategy, op func() (T, error)) (T, error) {
var empty T var empty, ret T
var err error
if maxAttempts < 1 { if maxAttempts < 1 {
return empty, fmt.Errorf("need at least 1 attempt to run op, but have %d max attempts", maxAttempts) return empty, fmt.Errorf("need at least 1 attempt to run op, but have %d max attempts", maxAttempts)
} }
var attempt int
reattemptCh := make(chan struct{}, 1) for i := 0; i < maxAttempts; i++ {
doReattempt := func() { if ctx.Err() != nil {
reattemptCh <- struct{}{}
}
doReattempt()
for {
select {
case <-ctx.Done():
return empty, ctx.Err() return empty, ctx.Err()
case <-reattemptCh:
attempt++
ret, err := op()
if err == nil {
return ret, nil
}
if attempt == maxAttempts {
return empty, &ErrFailedPermanently{
attempts: maxAttempts,
LastErr: err,
}
}
time.AfterFunc(strategy.Duration(attempt-1), doReattempt)
} }
ret, err = op()
if err == nil {
return ret, nil
}
// Don't sleep when we are about to exit the loop & return ErrFailedPermanently
if i != maxAttempts-1 {
time.Sleep(strategy.Duration(i))
}
}
return empty, &ErrFailedPermanently{
attempts: maxAttempts,
LastErr: err,
} }
} }
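For reference, a minimal sketch of how a caller might use the rewritten retry helper (generic signature and the `Fixed` strategy follow the `op-service/backoff` package as shown in this diff; the failing operation is invented for illustration):

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/ethereum-optimism/optimism/op-service/backoff"
)

func main() {
	attempts := 0
	// Retry up to 5 times with a fixed 2s delay between attempts. If every attempt
	// fails, Do returns ErrFailedPermanently, which now unwraps to the last error.
	v, err := backoff.Do(context.Background(), 5, backoff.Fixed(2*time.Second), func() (int, error) {
		attempts++
		if attempts < 3 {
			return 0, errors.New("transient failure") // invented failure for illustration
		}
		return 42, nil
	})
	fmt.Println(v, err, attempts) // 42 <nil> 3
}
```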
...@@ -159,7 +159,7 @@ func TestSend(t *testing.T) { ...@@ -159,7 +159,7 @@ func TestSend(t *testing.T) {
{sendErr: true}, {sendErr: true},
{}, {},
}, },
nonces: []uint64{0, 1, 1}, nonces: []uint64{0, 1},
total: 3 * time.Second, total: 3 * time.Second,
}, },
} }
......
...@@ -17,6 +17,7 @@ import ( ...@@ -17,6 +17,7 @@ import (
"github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
"github.com/ethereum-optimism/optimism/op-service/backoff"
"github.com/ethereum-optimism/optimism/op-service/txmgr/metrics" "github.com/ethereum-optimism/optimism/op-service/txmgr/metrics"
) )
...@@ -175,7 +176,13 @@ func (m *SimpleTxManager) send(ctx context.Context, candidate TxCandidate) (*typ ...@@ -175,7 +176,13 @@ func (m *SimpleTxManager) send(ctx context.Context, candidate TxCandidate) (*typ
ctx, cancel = context.WithTimeout(ctx, m.cfg.TxSendTimeout) ctx, cancel = context.WithTimeout(ctx, m.cfg.TxSendTimeout)
defer cancel() defer cancel()
} }
tx, err := m.craftTx(ctx, candidate) tx, err := backoff.Do(ctx, 30, backoff.Fixed(2*time.Second), func() (*types.Transaction, error) {
tx, err := m.craftTx(ctx, candidate)
if err != nil {
m.l.Warn("Failed to create a transaction, will retry", "err", err)
}
return tx, err
})
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to create the tx: %w", err) return nil, fmt.Errorf("failed to create the tx: %w", err)
} }
......
...@@ -38,21 +38,21 @@ ...@@ -38,21 +38,21 @@
"url": "https://github.com/ethereum-optimism/optimism.git" "url": "https://github.com/ethereum-optimism/optimism.git"
}, },
"dependencies": { "dependencies": {
"@eth-optimism/common-ts": "0.8.3", "@eth-optimism/common-ts": "workspace:*",
"@eth-optimism/contracts-bedrock": "0.16.0", "@eth-optimism/contracts-bedrock": "workspace:*",
"@eth-optimism/contracts-periphery": "1.0.8", "@eth-optimism/contracts-periphery": "1.0.8",
"@eth-optimism/core-utils": "0.12.2", "@eth-optimism/core-utils": "workspace:*",
"@eth-optimism/sdk": "3.1.0", "@eth-optimism/sdk": "workspace:*",
"@types/dateformat": "^5.0.0", "@types/dateformat": "^5.0.0",
"chai-as-promised": "^7.1.1", "chai-as-promised": "^7.1.1",
"dateformat": "^4.5.1", "dateformat": "^4.5.1",
"dotenv": "^16.1.4", "dotenv": "^16.3.1",
"ethers": "^5.7.0" "ethers": "^5.7.2"
}, },
"devDependencies": { "devDependencies": {
"@ethersproject/abstract-provider": "^5.7.0", "@ethersproject/abstract-provider": "^5.7.0",
"@nomiclabs/hardhat-ethers": "^2.0.6", "@nomiclabs/hardhat-ethers": "^2.0.6",
"@nomiclabs/hardhat-waffle": "^2.0.3", "@nomiclabs/hardhat-waffle": "^2.0.6",
"hardhat": "^2.9.6", "hardhat": "^2.9.6",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsx": "^3.12.7" "tsx": "^3.12.7"
......
{ {
"extends": "../../tsconfig.json", "extends": "../../tsconfig.json",
"compilerOptions": { "compilerOptions": {
"outDir": "./dist" "outDir": "./dist",
"skipLibCheck": true
}, },
"include": [ "include": [
"package.json", "package.json",
......
...@@ -401,6 +401,7 @@ MintManager_mint_Test:test_mint_moreThanCap_reverts() (gas: 142523) ...@@ -401,6 +401,7 @@ MintManager_mint_Test:test_mint_moreThanCap_reverts() (gas: 142523)
MintManager_upgrade_Test:test_upgrade_fromNotOwner_reverts() (gas: 10974) MintManager_upgrade_Test:test_upgrade_fromNotOwner_reverts() (gas: 10974)
MintManager_upgrade_Test:test_upgrade_fromOwner_succeeds() (gas: 23434) MintManager_upgrade_Test:test_upgrade_fromOwner_succeeds() (gas: 23434)
MintManager_upgrade_Test:test_upgrade_toZeroAddress_reverts() (gas: 11003) MintManager_upgrade_Test:test_upgrade_toZeroAddress_reverts() (gas: 11003)
Multichain:test_script_succeeds() (gas: 3078)
OptimismMintableERC20_Test:test_bridge_succeeds() (gas: 7621) OptimismMintableERC20_Test:test_bridge_succeeds() (gas: 7621)
OptimismMintableERC20_Test:test_burn_notBridge_reverts() (gas: 11164) OptimismMintableERC20_Test:test_burn_notBridge_reverts() (gas: 11164)
OptimismMintableERC20_Test:test_burn_succeeds() (gas: 50996) OptimismMintableERC20_Test:test_burn_succeeds() (gas: 50996)
...@@ -517,8 +518,6 @@ OptimistTest:test_supportsInterface_returnsCorrectInterfaceForERC721_succeeds() ...@@ -517,8 +518,6 @@ OptimistTest:test_supportsInterface_returnsCorrectInterfaceForERC721_succeeds()
OptimistTest:test_tokenIdOfAddress_returnsOwnerID_succeeds() (gas: 63730) OptimistTest:test_tokenIdOfAddress_returnsOwnerID_succeeds() (gas: 63730)
OptimistTest:test_tokenURI_returnsCorrectTokenURI_succeeds() (gas: 195908) OptimistTest:test_tokenURI_returnsCorrectTokenURI_succeeds() (gas: 195908)
OptimistTest:test_transferFrom_soulbound_reverts() (gas: 75512) OptimistTest:test_transferFrom_soulbound_reverts() (gas: 75512)
PostSherlockL1:test_script_succeeds() (gas: 3078)
PostSherlockL2:test_script_succeeds() (gas: 3078)
PreimageOracle_Test:test_keccak256PreimageKey_succeeds() (gas: 319) PreimageOracle_Test:test_keccak256PreimageKey_succeeds() (gas: 319)
PreimageOracle_Test:test_loadKeccak256PreimagePart_outOfBoundsOffset_reverts() (gas: 8993) PreimageOracle_Test:test_loadKeccak256PreimagePart_outOfBoundsOffset_reverts() (gas: 8993)
PreimageOracle_Test:test_loadKeccak256PreimagePart_succeeds() (gas: 76098) PreimageOracle_Test:test_loadKeccak256PreimagePart_succeeds() (gas: 76098)
...@@ -644,10 +643,11 @@ SequencerFeeVault_Test:test_withdraw_toL1_succeeds() (gas: 171357) ...@@ -644,10 +643,11 @@ SequencerFeeVault_Test:test_withdraw_toL1_succeeds() (gas: 171357)
SetPrevBaseFee_Test:test_setPrevBaseFee_succeeds() (gas: 11549) SetPrevBaseFee_Test:test_setPrevBaseFee_succeeds() (gas: 11549)
StandardBridge_Stateless_Test:test_isCorrectTokenPair_succeeds() (gas: 49936) StandardBridge_Stateless_Test:test_isCorrectTokenPair_succeeds() (gas: 49936)
StandardBridge_Stateless_Test:test_isOptimismMintableERC20_succeeds() (gas: 33072) StandardBridge_Stateless_Test:test_isOptimismMintableERC20_succeeds() (gas: 33072)
SystemConfig_Initialize_Test:test_initialize_startBlockOverride_succeeds() (gas: 55905) SystemConfig_Initialize_Test:test_initialize_events_succeeds() (gas: 72059)
SystemConfig_Initialize_Test:test_initialize_startBlockOverride_succeeds() (gas: 65240)
SystemConfig_Initialize_Test:test_initialize_values_succeeds() (gas: 64946) SystemConfig_Initialize_Test:test_initialize_values_succeeds() (gas: 64946)
SystemConfig_Initialize_TestFail:test_initialize_lowGasLimit_reverts() (gas: 94515) SystemConfig_Initialize_TestFail:test_initialize_lowGasLimit_reverts() (gas: 55653)
SystemConfig_Initialize_TestFail:test_initialize_startBlock_reverts() (gas: 64676) SystemConfig_Initialize_TestFail:test_initialize_startBlock_reverts() (gas: 78350)
SystemConfig_Setters_TestFail:test_setBatcherHash_notOwner_reverts() (gas: 15607) SystemConfig_Setters_TestFail:test_setBatcherHash_notOwner_reverts() (gas: 15607)
SystemConfig_Setters_TestFail:test_setGasConfig_notOwner_reverts() (gas: 15577) SystemConfig_Setters_TestFail:test_setGasConfig_notOwner_reverts() (gas: 15577)
SystemConfig_Setters_TestFail:test_setGasLimit_notOwner_reverts() (gas: 15676) SystemConfig_Setters_TestFail:test_setGasLimit_notOwner_reverts() (gas: 15676)
......
# Contributing
First off, thanks for taking the time to contribute!
We welcome and appreciate all kinds of contributions. Before contributing, please review the procedures for each type of contribution in the [Table of Contents](#table-of-contents); this streamlines the process for both maintainers and contributors. To find ways to contribute, view the [I Want To Contribute](#i-want-to-contribute) section below. Larger contributions should [open an issue](https://github.com/ethereum-optimism/optimism/issues/new) before implementation to ensure changes don't go to waste.
We're excited to work with you and your contributions to scaling Ethereum!
## Table of Contents
- [I Have a Question](#i-have-a-question)
- [I Want To Contribute](#i-want-to-contribute)
- [Reporting Bugs](#reporting-bugs)
- [Suggesting Enhancements](#suggesting-enhancements)
- [Your First Code Contribution](#your-first-code-contribution)
- [Improving The Documentation](#improving-the-documentation)
- [Deploying on Devnet](#deploying-on-devnet)
- [Tools](#tools)
## I Have a Question
> **Note**
> Before making an issue, please read the documentation and search the issues to see if your question has already been answered.
If you have any questions about the smart contracts, please feel free to ask them in the Optimism Discord developer channels or create a new, detailed issue.
## I Want To Contribute
### Reporting Bugs
**Any and all bug reports on production smart contract code should be submitted privately to the Optimism team so that we can mitigate the issue before it is exploited. Please see our security policy document [here](https://github.com/ethereum-optimism/.github/blob/master/SECURITY.md).**
### Suggesting Enhancements
#### Before Submitting an Enhancement
- Read the documentation and the smart contracts themselves to see if the feature already exists.
- Perform a search in the issues to see if the enhancement has already been suggested. If it has, add a comment to the existing issue instead of opening a new one.
#### How Do I Submit a Good Enhancement Suggestion?
Enhancement suggestions are tracked as [GitHub issues](/issues).
- Use a **clear and descriptive title** for the issue to identify the suggestion.
- Provide a **step-by-step** description of the suggested enhancement in as many details as possible.
- Describe the **current** behavior, explain how the **intended** behavior differs, and why. You can also mention which alternatives do not work for you.
- Explain why this enhancement would be useful in Optimism's smart contracts. You may also want to point out other projects that have solved the problem well and could serve as inspiration.
### Your First Code Contribution
The best place to begin contributing is by looking through the issues with the `good first issue` label. These are issues that are relatively easy to implement and are a great way to get familiar with the codebase.
Optimism's smart contracts are written in Solidity and we use [foundry](https://github.com/foundry-rs/foundry) as our development framework. To get started, you'll need to install several dependencies:
1. [pnpm](https://pnpm.io)
1. Make sure to `pnpm install`
1. [foundry](https://getfoundry.sh)
1. Foundry is built with [rust](https://www.rust-lang.org/tools/install), and this project uses a pinned version of foundry. Install the rust toolchain with `rustup`.
1. Make sure to install the version of foundry used by `ci-builder`, defined in the `.foundryrc` file in the root of this repo. Once you have `foundryup` installed, there is a helper to do this: `pnpm install:foundry`
1. [golang](https://golang.org/doc/install)
1. [python](https://www.python.org/downloads/)
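A condensed sketch of the setup steps above, assuming a fresh clone of the monorepo (exact versions come from `.foundryrc` and the individual toolchain installers):

```shell
# Install JS dependencies for the monorepo (pnpm v8 and a recent Node.js assumed).
pnpm install

# Install the pinned foundry version defined in .foundryrc (requires rustup and foundryup).
pnpm install:foundry

# Sanity-check the toolchain.
forge --version
go version
python3 --version
```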
Our [Style Guide](STYLE_GUIDE.md) contains information about the project structure, syntax preferences, naming conventions, and more. Please take a look at it before submitting a PR, and let us know if you spot inconsistencies!
Once you've read the style guide and are ready to work on your PR, there are several useful `pnpm` scripts to know about that will help you with development:
1. `pnpm build` Builds the smart contracts.
1. `pnpm test` Runs the full `forge` test suite.
1. `pnpm gas-snapshot` Generates the gas snapshot for the smart contracts.
1. `pnpm semver-lock` Generates the semver lockfile.
1. `pnpm storage-snapshot` Generates the storage lockfile.
1. `pnpm autogen:invariant-docs` Generates the invariant test documentation.
1. `pnpm clean` Removes all build artifacts for `forge` and `go` compilations.
1. `pnpm validate-spacers` Validates the positions of the storage slot spacers.
1. `pnpm validate-deploy-configs` Validates the deployment configurations in `deploy-config`.
1. `pnpm slither` Runs the slither static analysis tool on the smart contracts.
1. `pnpm lint` Runs the linter on the smart contracts and scripts.
1. `pnpm pre-pr` Runs most checks, generators, and linters prior to a PR. For most PRs, this is sufficient to pass CI if everything is in order.
1. `pnpm pre-pr:full` Runs all checks, generators, and linters prior to a PR.
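As a usage example, a typical pre-PR pass might look like the following sketch (assuming you are working inside the `contracts-bedrock` package; adjust to the checks your change actually touches):

```shell
cd packages/contracts-bedrock

# Build and test the contracts.
pnpm build
pnpm test

# Regenerate checked-in artifacts and run most CI checks locally.
pnpm pre-pr
```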
### Improving The Documentation
Documentation improvements are more than welcome! If you see a typo or feel that a code comment describes something poorly or incorrectly, please submit a PR with a fix.
### Deploying on Devnet
To deploy the smart contracts on a local devnet, run `make devnet-up` in the monorepo root. For more information on the local devnet, see [devnet.md](../../specs/meta/devnet.md).
### Tools
#### Layout Locking
We use a system called "layout locking" as a safety mechanism to prevent certain contract variables from being moved to different storage slots accidentally.
To lock a contract variable, add it to the `layout-lock.json` file, which has the following format:
```json
{
"MyContractName": {
"myVariableName": {
"slot": 1,
"offset": 0,
"length": 32
}
}
}
```
With the above config, the `validate-spacers` script will check that we have a contract called `MyContractName`, that the contract has a variable named `myVariableName`, and that the variable is in the correct position as defined in the lock file.
You should add things to the `layout-lock.json` file when you want those variables to **never** change.
Layout locking should be used in combination with diffing the `.storage-layout` file in CI.
#### Gas Snapshots
We use forge's `gas-snapshot` subcommand to produce a gas snapshot for most tests within our suite. CI will check that the gas snapshot has been updated properly when it runs, so make sure to run `pnpm gas-snapshot`!
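For example (a sketch; the snapshot file name is assumed to be forge's default `.gas-snapshot`):

```shell
# Regenerate the gas snapshot and include the updated file in your PR.
pnpm gas-snapshot
git diff --stat .gas-snapshot  # assumes forge's default snapshot file name
```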
#### Semver Locking
Many of our smart contracts are semantically versioned. To make sure that changes are not made to a contract without deliberately bumping its version, we commit to the source code and the creation bytecode of its dependencies in a lockfile. Consult the [Style Guide](./STYLE_GUIDE.md#Versioning) for more information about how our contracts are versioned.
#### Storage Snapshots
Because Optimism's protocol relies on many proxied contracts, we automatically track changes to the storage layouts of the contracts in this project. This ensures that we don't break a proxy by upgrading its implementation to a contract with a different storage layout. To generate the storage lockfile, run `pnpm storage-snapshot`.
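A sketch of regenerating both lockfiles after a contract change (assuming both scripts write their lockfiles into the package directory, so `git` will show the updates):

```shell
# Recompute the semver and storage lockfiles, then review what changed.
pnpm semver-lock
pnpm storage-snapshot
git status --short
```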
...@@ -50,78 +50,29 @@ We export contract ABIs, contract source code, and contract deployment informati ...@@ -50,78 +50,29 @@ We export contract ABIs, contract source code, and contract deployment informati
npm install @eth-optimism/contracts-bedrock npm install @eth-optimism/contracts-bedrock
``` ```
## Development ## Contributing
### Dependencies For all information about working on and contributing to Optimism's smart contracts, please see [CONTRIBUTING.md](./CONTRIBUTING.md)
We work on this repository with a combination of [Hardhat](https://hardhat.org) and [Foundry](https://getfoundry.sh/). ## Deployment
1. Install node modules with pnpm (v8) and Node.js (16+):
```shell
pnpm install
```
2. Install the correct version of Foundry (defined in the `.foundryrc` file in the root of this repo):
```shell
pnpm install:foundry
```
### Build
```shell
pnpm build
```
### Tests
```shell
pnpm test
```
### Deployment
The smart contracts are deployed using `foundry` with a `hardhat-deploy` compatibility layer. When the contracts are deployed, The smart contracts are deployed using `foundry` with a `hardhat-deploy` compatibility layer. When the contracts are deployed,
they will write a temp file to disk that can then be formatted into a `hardhat-deploy` style artifact by calling another script. they will write a temp file to disk that can then be formatted into a `hardhat-deploy` style artifact by calling another script.
#### Configuration ### Configuration
Create or modify a file `<network-name>.json` inside of the [`deploy-config`](./deploy-config/) folder. Create or modify a file `<network-name>.json` inside of the [`deploy-config`](./deploy-config/) folder.
By default, the network name will be selected automatically based on the chainid. Alternatively, the `DEPLOYMENT_CONTEXT` env var can be used to override the network name. By default, the network name will be selected automatically based on the chainid. Alternatively, the `DEPLOYMENT_CONTEXT` env var can be used to override the network name.
The spec for the deploy config is defined by the `deployConfigSpec` located inside of the [`hardhat.config.ts`](./hardhat.config.ts). The spec for the deploy config is defined by the `deployConfigSpec` located inside of the [`hardhat.config.ts`](./hardhat.config.ts).
#### Execution ### Execution
1. Set the env vars `ETH_RPC_URL`, `PRIVATE_KEY` and `ETHERSCAN_API_KEY` if contract verification is desired 1. Set the env vars `ETH_RPC_URL`, `PRIVATE_KEY` and `ETHERSCAN_API_KEY` if contract verification is desired
1. Deploy the contracts with `forge script -vvv scripts/Deploy.s.sol:Deploy --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY` 1. Deploy the contracts with `forge script -vvv scripts/Deploy.s.sol:Deploy --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY`
Pass the `--verify` flag to verify the deployments automatically with Etherscan. Pass the `--verify` flag to verify the deployments automatically with Etherscan.
1. Generate the hardhat deploy artifacts with `forge script -vvv scripts/Deploy.s.sol:Deploy --sig 'sync()' --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY` 1. Generate the hardhat deploy artifacts with `forge script -vvv scripts/Deploy.s.sol:Deploy --sig 'sync()' --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY`
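Putting those steps together, an end-to-end deployment might look like the following sketch (values are placeholders; append `--verify` to the first command if you want Etherscan verification):

```shell
export ETH_RPC_URL=<your L1 RPC endpoint>
export PRIVATE_KEY=<deployer private key>
export ETHERSCAN_API_KEY=<only needed with --verify>

# 1. Deploy the contracts.
forge script -vvv scripts/Deploy.s.sol:Deploy \
  --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY

# 2. Convert the broadcast output into hardhat-deploy style artifacts.
forge script -vvv scripts/Deploy.s.sol:Deploy --sig 'sync()' \
  --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY
```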
#### Deploying a single contract ### Deploying a single contract
All of the functions for deploying a single contract are `public` meaning that the `--sig` argument to `forge script` can be used to All of the functions for deploying a single contract are `public` meaning that the `--sig` argument to `forge script` can be used to
target the deployment of a single contract. target the deployment of a single contract.
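For example, assuming the script exposes a public function named `deploySystemConfig()` (the name here is illustrative; check `scripts/Deploy.s.sol` for the exact function names), you could deploy just that contract with:

```shell
forge script -vvv scripts/Deploy.s.sol:Deploy --sig 'deploySystemConfig()' \
  --rpc-url $ETH_RPC_URL --broadcast --private-key $PRIVATE_KEY
```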
## Tools
### Layout Locking
We use a system called "layout locking" as a safety mechanism to prevent certain contract variables from being moved to different storage slots accidentally.
To lock a contract variable, add it to the `layout-lock.json` file which has the following format:
```json
{
"MyContractName": {
"myVariableName": {
"slot": 1,
"offset": 0,
"length": 32
}
}
}
```
With the above config, the `validate-spacers` hardhat task will check that we have a contract called `MyContractName`, that the contract has a variable named `myVariableName`, and that the variable is in the correct position as defined in the lock file.
You should add things to the `layout-lock.json` file when you want those variables to **never** change.
Layout locking should be used in combination with diffing the `.storage-layout` file in CI.
...@@ -52,5 +52,7 @@ ...@@ -52,5 +52,7 @@
"l2GenesisRegolithTimeOffset": "0x0", "l2GenesisRegolithTimeOffset": "0x0",
"eip1559Denominator": 50, "eip1559Denominator": 50,
"eip1559Elasticity": 10 "eip1559Elasticity": 10,
"systemConfigStartBlock": 0
} }
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
"l1ChainID": 5, "l1ChainID": 5,
"l2ChainID": 420, "l2ChainID": 420,
"l2BlockTime": 2, "l2BlockTime": 2,
"l1BlockTime": 12,
"maxSequencerDrift": 600, "maxSequencerDrift": 600,
"sequencerWindowSize": 3600, "sequencerWindowSize": 3600,
"channelTimeout": 300, "channelTimeout": 300,
...@@ -36,5 +37,6 @@ ...@@ -36,5 +37,6 @@
"l2GenesisBlockGasLimit": "0x17D7840", "l2GenesisBlockGasLimit": "0x17D7840",
"l2GenesisBlockBaseFeePerGas": "0x3b9aca00", "l2GenesisBlockBaseFeePerGas": "0x3b9aca00",
"eip1559Denominator": 50, "eip1559Denominator": 50,
"eip1559Elasticity": 10 "eip1559Elasticity": 10,
"systemConfigStartBlock": 8300214
} }
...@@ -38,5 +38,6 @@ ...@@ -38,5 +38,6 @@
"numDeployConfirmations": 1, "numDeployConfirmations": 1,
"eip1559Denominator": 50, "eip1559Denominator": 50,
"eip1559Elasticity": 10, "eip1559Elasticity": 10,
"l2GenesisRegolithTimeOffset": "0x0" "l2GenesisRegolithTimeOffset": "0x0",
"systemConfigStartBlock": 0
} }
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
"prebuild": "./scripts/verify-foundry-install.sh", "prebuild": "./scripts/verify-foundry-install.sh",
"build:differential": "go build -o ./scripts/differential-testing/differential-testing ./scripts/differential-testing", "build:differential": "go build -o ./scripts/differential-testing/differential-testing ./scripts/differential-testing",
"build:fuzz": "(cd test-case-generator && go build ./cmd/fuzz.go)", "build:fuzz": "(cd test-case-generator && go build ./cmd/fuzz.go)",
"autogen:invariant-docs": "tsx scripts/invariant-doc-gen.ts", "autogen:invariant-docs": "npx tsx scripts/invariant-doc-gen.ts",
"test": "pnpm build:differential && pnpm build:fuzz && forge test", "test": "pnpm build:differential && pnpm build:fuzz && forge test",
"coverage": "pnpm build:differential && pnpm build:fuzz && forge coverage", "coverage": "pnpm build:differential && pnpm build:fuzz && forge coverage",
"coverage:lcov": "pnpm build:differential && pnpm build:fuzz && forge coverage --report lcov", "coverage:lcov": "pnpm build:differential && pnpm build:fuzz && forge coverage --report lcov",
...@@ -29,8 +29,10 @@ ...@@ -29,8 +29,10 @@
"slither:triage": "TRIAGE_MODE=1 ./scripts/slither.sh", "slither:triage": "TRIAGE_MODE=1 ./scripts/slither.sh",
"clean": "rm -rf ./artifacts ./forge-artifacts ./cache ./tsconfig.tsbuildinfo ./tsconfig.build.tsbuildinfo ./test-case-generator/fuzz ./scripts/differential-testing/differential-testing", "clean": "rm -rf ./artifacts ./forge-artifacts ./cache ./tsconfig.tsbuildinfo ./tsconfig.build.tsbuildinfo ./test-case-generator/fuzz ./scripts/differential-testing/differential-testing",
"preinstall": "npx only-allow pnpm", "preinstall": "npx only-allow pnpm",
"pre-pr": "pnpm clean && pnpm gas-snapshot && pnpm storage-snapshot && pnpm semver-lock && pnpm autogen:invariant-docs && pnpm lint && (cd ../../op-bindings && make)",
"pre-pr:full": "pnpm test && pnpm slither && pnpm validate-deploy-configs && pnpm validate-spacers && pnpm pre-pr",
"lint:ts:check": "eslint . --max-warnings=0", "lint:ts:check": "eslint . --max-warnings=0",
"lint:forge-tests:check": "tsx scripts/forge-test-names.ts", "lint:forge-tests:check": "npx tsx scripts/forge-test-names.ts",
"lint:contracts:check": "pnpm lint:fix && git diff --exit-code", "lint:contracts:check": "pnpm lint:fix && git diff --exit-code",
"lint:check": "pnpm lint:contracts:check && pnpm lint:ts:check", "lint:check": "pnpm lint:contracts:check && pnpm lint:ts:check",
"lint:ts:fix": "eslint --fix .", "lint:ts:fix": "eslint --fix .",
......
...@@ -65,13 +65,20 @@ abstract contract Deployer is Script { ...@@ -65,13 +65,20 @@ abstract contract Deployer is Script {
/// bytes32(uint256(keccak256('eip1967.proxy.admin')) - 1) /// bytes32(uint256(keccak256('eip1967.proxy.admin')) - 1)
bytes32 internal constant OWNER_KEY = 0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103; bytes32 internal constant OWNER_KEY = 0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103;
/// @notice Create the global variables and set up the filesystem /// @notice Create the global variables and set up the filesystem.
/// Forge script will create a file where the prefix is the
/// name of the function that runs with the suffix `-latest.json`.
/// By default, `run()` is called. Allow the user to use the SIG
/// env var to specify what function signature was called so that
/// the `sync()` method can be used to create hardhat deploy style
/// artifacts.
function setUp() public virtual { function setUp() public virtual {
string memory root = vm.projectRoot(); string memory root = vm.projectRoot();
deployScript = vm.envOr("DEPLOY_SCRIPT", name()); deployScript = vm.envOr("DEPLOY_SCRIPT", name());
deploymentContext = _getDeploymentContext(); deploymentContext = _getDeploymentContext();
string memory deployFile = vm.envOr("DEPLOY_FILE", string("run-latest.json")); string memory sig = vm.envOr("SIG", string("run"));
string memory deployFile = vm.envOr("DEPLOY_FILE", string.concat(sig, "-latest.json"));
uint256 chainId = vm.envOr("CHAIN_ID", block.chainid); uint256 chainId = vm.envOr("CHAIN_ID", block.chainid);
deployPath = string.concat(root, "/broadcast/", deployScript, ".s.sol/", vm.toString(chainId), "/", deployFile); deployPath = string.concat(root, "/broadcast/", deployScript, ".s.sol/", vm.toString(chainId), "/", deployFile);
......
...@@ -97,7 +97,7 @@ contract DeleteOutput is SafeBuilder { ...@@ -97,7 +97,7 @@ contract DeleteOutput is SafeBuilder {
_postCheck(); _postCheck();
} }
function buildCalldata(address _proxyAdmin) internal view override returns (bytes memory) { function buildCalldata(address) internal view override returns (bytes memory) {
IMulticall3.Call3[] memory calls = new IMulticall3.Call3[](1); IMulticall3.Call3[] memory calls = new IMulticall3.Call3[](1);
calls[0] = IMulticall3.Call3({ calls[0] = IMulticall3.Call3({
......
...@@ -56,7 +56,13 @@ abstract contract SafeBuilder is EnhancedScript, GlobalConstants { ...@@ -56,7 +56,13 @@ abstract contract SafeBuilder is EnhancedScript, GlobalConstants {
if (block.chainid == OP_GOERLI) { if (block.chainid == OP_GOERLI) {
safe = 0xE534ccA2753aCFbcDBCeB2291F596fc60495257e; safe = 0xE534ccA2753aCFbcDBCeB2291F596fc60495257e;
proxyAdmin = 0x4200000000000000000000000000000000000018; proxyAdmin = 0x4200000000000000000000000000000000000018;
} else if (block.chainid == GOERLI) {
safe = 0xBc1233d0C3e6B5d53Ab455cF65A6623F6dCd7e4f;
proxyAdmin = 0x01d3670863c3F4b24D7b107900f0b75d4BbC6e0d;
} }
console.log("ChainID: %s", block.chainid);
console.log("Using Safe: %s", safe);
console.log("Using ProxyAdmin: %s", proxyAdmin);
return run(safe, proxyAdmin); return run(safe, proxyAdmin);
} }
......
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
"src/L1/L1StandardBridge.sol": "0xbd7b303cefe46bc14bf1a2b81e5702ff45ce9c5257524e59778e11c75f7f5bdc", "src/L1/L1StandardBridge.sol": "0xbd7b303cefe46bc14bf1a2b81e5702ff45ce9c5257524e59778e11c75f7f5bdc",
"src/L1/L2OutputOracle.sol": "0x05ea17a834563ffa50cade81189b120b6f0805ba316d6a9893c8cf8b231e57e3", "src/L1/L2OutputOracle.sol": "0x05ea17a834563ffa50cade81189b120b6f0805ba316d6a9893c8cf8b231e57e3",
"src/L1/OptimismPortal.sol": "0xeefcc16d30e14ed7ce9970f3aeaf1d5668324b3fc1ddb4790da5804cfdd78980", "src/L1/OptimismPortal.sol": "0xeefcc16d30e14ed7ce9970f3aeaf1d5668324b3fc1ddb4790da5804cfdd78980",
"src/L1/SystemConfig.sol": "0x29beec0a03b9602a53e3ceaec2354972d917f8b80f1b3a8f03f4fb7a67753fce", "src/L1/SystemConfig.sol": "0xd5904b53153f2e32be28f2ce1ea51374b19fdf4f478f486f02cf67d8b62b7c29",
"src/L2/BaseFeeVault.sol": "0xd8df28898799b80c370e77e9aad09f79235dfda2bf13e56daf21997cfe54200d", "src/L2/BaseFeeVault.sol": "0xd8df28898799b80c370e77e9aad09f79235dfda2bf13e56daf21997cfe54200d",
"src/L2/GasPriceOracle.sol": "0xb7d8c4f3ea8db31900125e341aae42a862a2b7d3f1c1aa60c97dc2d0e022b7ba", "src/L2/GasPriceOracle.sol": "0xb7d8c4f3ea8db31900125e341aae42a862a2b7d3f1c1aa60c97dc2d0e022b7ba",
"src/L2/L1Block.sol": "0x38ea78a9611656a60ae4d58db75e96413a638e3ccb2e935052441f98a1fd3105", "src/L2/L1Block.sol": "0x38ea78a9611656a60ae4d58db75e96413a638e3ccb2e935052441f98a1fd3105",
......
...@@ -98,11 +98,11 @@ contract SystemConfig is OwnableUpgradeable, Semver { ...@@ -98,11 +98,11 @@ contract SystemConfig is OwnableUpgradeable, Semver {
/// @notice The block at which the op-node can start searching for logs from. /// @notice The block at which the op-node can start searching for logs from.
uint256 public startBlock; uint256 public startBlock;
/// @custom:semver 1.5.0 /// @custom:semver 1.6.0
/// @notice Constructs the SystemConfig contract. Cannot set /// @notice Constructs the SystemConfig contract. Cannot set
/// the owner to `address(0)` due to the Ownable contract's /// the owner to `address(0)` due to the Ownable contract's
/// implementation, so set it to `address(0xdEaD)` /// implementation, so set it to `address(0xdEaD)`
constructor() Semver(1, 5, 0) { constructor() Semver(1, 6, 0) {
initialize({ initialize({
_owner: address(0xdEaD), _owner: address(0xdEaD),
_overhead: 0, _overhead: 0,
...@@ -165,12 +165,12 @@ contract SystemConfig is OwnableUpgradeable, Semver { ...@@ -165,12 +165,12 @@ contract SystemConfig is OwnableUpgradeable, Semver {
__Ownable_init(); __Ownable_init();
transferOwnership(_owner); transferOwnership(_owner);
overhead = _overhead; // These are set in ascending order of their UpdateTypes.
scalar = _scalar; _setBatcherHash(_batcherHash);
batcherHash = _batcherHash; _setGasConfig({ _overhead: _overhead, _scalar: _scalar });
gasLimit = _gasLimit; _setGasLimit(_gasLimit);
_setUnsafeBlockSigner(_unsafeBlockSigner);
_setAddress(_unsafeBlockSigner, UNSAFE_BLOCK_SIGNER_SLOT);
_setAddress(_batchInbox, BATCH_INBOX_SLOT); _setAddress(_batchInbox, BATCH_INBOX_SLOT);
_setAddress(_addresses.l1CrossDomainMessenger, L1_CROSS_DOMAIN_MESSENGER_SLOT); _setAddress(_addresses.l1CrossDomainMessenger, L1_CROSS_DOMAIN_MESSENGER_SLOT);
_setAddress(_addresses.l1ERC721Bridge, L1_ERC_721_BRIDGE_SLOT); _setAddress(_addresses.l1ERC721Bridge, L1_ERC_721_BRIDGE_SLOT);
...@@ -282,28 +282,47 @@ contract SystemConfig is OwnableUpgradeable, Semver { ...@@ -282,28 +282,47 @@ contract SystemConfig is OwnableUpgradeable, Semver {
} }
} }
/// @notice Updates the unsafe block signer address. /// @notice Updates the unsafe block signer address. Can only be called by the owner.
/// @param _unsafeBlockSigner New unsafe block signer address. /// @param _unsafeBlockSigner New unsafe block signer address.
function setUnsafeBlockSigner(address _unsafeBlockSigner) external onlyOwner { function setUnsafeBlockSigner(address _unsafeBlockSigner) external onlyOwner {
_setUnsafeBlockSigner(_unsafeBlockSigner);
}
/// @notice Updates the unsafe block signer address.
/// @param _unsafeBlockSigner New unsafe block signer address.
function _setUnsafeBlockSigner(address _unsafeBlockSigner) internal {
_setAddress(_unsafeBlockSigner, UNSAFE_BLOCK_SIGNER_SLOT); _setAddress(_unsafeBlockSigner, UNSAFE_BLOCK_SIGNER_SLOT);
bytes memory data = abi.encode(_unsafeBlockSigner); bytes memory data = abi.encode(_unsafeBlockSigner);
emit ConfigUpdate(VERSION, UpdateType.UNSAFE_BLOCK_SIGNER, data); emit ConfigUpdate(VERSION, UpdateType.UNSAFE_BLOCK_SIGNER, data);
} }
/// @notice Updates the batcher hash. /// @notice Updates the batcher hash. Can only be called by the owner.
/// @param _batcherHash New batcher hash. /// @param _batcherHash New batcher hash.
function setBatcherHash(bytes32 _batcherHash) external onlyOwner { function setBatcherHash(bytes32 _batcherHash) external onlyOwner {
_setBatcherHash(_batcherHash);
}
/// @notice Internal function for updating the batcher hash.
/// @param _batcherHash New batcher hash.
function _setBatcherHash(bytes32 _batcherHash) internal {
batcherHash = _batcherHash; batcherHash = _batcherHash;
bytes memory data = abi.encode(_batcherHash); bytes memory data = abi.encode(_batcherHash);
emit ConfigUpdate(VERSION, UpdateType.BATCHER, data); emit ConfigUpdate(VERSION, UpdateType.BATCHER, data);
} }
/// @notice Updates gas config. /// @notice Updates gas config. Can only be called by the owner.
/// @param _overhead New overhead value. /// @param _overhead New overhead value.
/// @param _scalar New scalar value. /// @param _scalar New scalar value.
function setGasConfig(uint256 _overhead, uint256 _scalar) external onlyOwner { function setGasConfig(uint256 _overhead, uint256 _scalar) external onlyOwner {
_setGasConfig(_overhead, _scalar);
}
/// @notice Internal function for updating the gas config.
/// @param _overhead New overhead value.
/// @param _scalar New scalar value.
function _setGasConfig(uint256 _overhead, uint256 _scalar) internal {
overhead = _overhead; overhead = _overhead;
scalar = _scalar; scalar = _scalar;
...@@ -311,9 +330,15 @@ contract SystemConfig is OwnableUpgradeable, Semver { ...@@ -311,9 +330,15 @@ contract SystemConfig is OwnableUpgradeable, Semver {
emit ConfigUpdate(VERSION, UpdateType.GAS_CONFIG, data); emit ConfigUpdate(VERSION, UpdateType.GAS_CONFIG, data);
} }
/// @notice Updates the L2 gas limit. /// @notice Updates the L2 gas limit. Can only be called by the owner.
/// @param _gasLimit New gas limit. /// @param _gasLimit New gas limit.
function setGasLimit(uint64 _gasLimit) external onlyOwner { function setGasLimit(uint64 _gasLimit) external onlyOwner {
_setGasLimit(_gasLimit);
}
/// @notice Internal function for updating the L2 gas limit.
/// @param _gasLimit New gas limit.
function _setGasLimit(uint64 _gasLimit) internal {
require(_gasLimit >= minimumGasLimit(), "SystemConfig: gas limit too low"); require(_gasLimit >= minimumGasLimit(), "SystemConfig: gas limit too low");
gasLimit = _gasLimit; gasLimit = _gasLimit;
......
...@@ -51,6 +51,9 @@ contract CommonTest is Test { ...@@ -51,6 +51,9 @@ contract CommonTest is Test {
event TransactionDeposited(address indexed from, address indexed to, uint256 indexed version, bytes opaqueData); event TransactionDeposited(address indexed from, address indexed to, uint256 indexed version, bytes opaqueData);
/// @dev OpenZeppelin Ownable.sol transferOwnership event
event OwnershipTransferred(address indexed previousOwner, address indexed newOwner);
FFIInterface ffi; FFIInterface ffi;
function setUp() public virtual { function setUp() public virtual {
......
...@@ -26,9 +26,6 @@ contract XDomainSetter3 is CrossDomainOwnable3 { ...@@ -26,9 +26,6 @@ contract XDomainSetter3 is CrossDomainOwnable3 {
contract CrossDomainOwnable3_Test is Messenger_Initializer { contract CrossDomainOwnable3_Test is Messenger_Initializer {
XDomainSetter3 setter; XDomainSetter3 setter;
/// @dev OpenZeppelin Ownable.sol transferOwnership event
event OwnershipTransferred(address indexed previousOwner, address indexed newOwner);
/// @dev CrossDomainOwnable3.sol transferOwnership event /// @dev CrossDomainOwnable3.sol transferOwnership event
event OwnershipTransferred(address indexed previousOwner, address indexed newOwner, bool isLocal); event OwnershipTransferred(address indexed previousOwner, address indexed newOwner, bool isLocal);
......
...@@ -18,6 +18,8 @@ contract SystemConfig_Init is CommonTest { ...@@ -18,6 +18,8 @@ contract SystemConfig_Init is CommonTest {
SystemConfig sysConf; SystemConfig sysConf;
SystemConfig systemConfigImpl; SystemConfig systemConfigImpl;
event ConfigUpdate(uint256 indexed version, SystemConfig.UpdateType indexed updateType, bytes data);
// Dummy addresses used to test getters // Dummy addresses used to test getters
address constant batchInbox = address(0x18); address constant batchInbox = address(0x18);
address constant l1CrossDomainMessenger = address(0x20); address constant l1CrossDomainMessenger = address(0x20);
...@@ -138,6 +140,51 @@ contract SystemConfig_Initialize_Test is SystemConfig_Init { ...@@ -138,6 +140,51 @@ contract SystemConfig_Initialize_Test is SystemConfig_Init {
); );
assertEq(sysConf.startBlock(), startBlock); assertEq(sysConf.startBlock(), startBlock);
} }
/// @dev Ensures that the events are emitted during initialization.
function test_initialize_events_succeeds() external {
// Wipe out the initialized slot so the proxy can be initialized again
vm.store(address(sysConf), bytes32(0), bytes32(0));
vm.store(address(sysConf), bytes32(uint256(106)), bytes32(0));
assertEq(sysConf.startBlock(), 0);
// The order depends here
vm.expectEmit(true, true, true, true, address(sysConf));
emit ConfigUpdate(0, SystemConfig.UpdateType.BATCHER, abi.encode(batcherHash));
vm.expectEmit(true, true, true, true, address(sysConf));
emit ConfigUpdate(0, SystemConfig.UpdateType.GAS_CONFIG, abi.encode(overhead, scalar));
vm.expectEmit(true, true, true, true, address(sysConf));
emit ConfigUpdate(0, SystemConfig.UpdateType.GAS_LIMIT, abi.encode(gasLimit));
vm.expectEmit(true, true, true, true, address(sysConf));
emit ConfigUpdate(0, SystemConfig.UpdateType.UNSAFE_BLOCK_SIGNER, abi.encode(unsafeBlockSigner));
vm.prank(multisig);
Proxy(payable(address(sysConf))).upgradeToAndCall(
address(systemConfigImpl),
abi.encodeCall(
SystemConfig.initialize,
(
alice, // _owner,
overhead, // _overhead,
scalar, // _scalar,
batcherHash, // _batcherHash
gasLimit, // _gasLimit,
unsafeBlockSigner, // _unsafeBlockSigner,
Constants.DEFAULT_RESOURCE_CONFIG(), // _config,
0, // _startBlock
batchInbox, // _batchInbox
SystemConfig.Addresses({ // _addresses
l1CrossDomainMessenger: l1CrossDomainMessenger,
l1ERC721Bridge: l1ERC721Bridge,
l1StandardBridge: l1StandardBridge,
l2OutputOracle: l2OutputOracle,
optimismPortal: optimismPortal,
optimismMintableERC20Factory: optimismMintableERC20Factory
})
)
)
);
}
} }
contract SystemConfig_Initialize_TestFail is SystemConfig_Init { contract SystemConfig_Initialize_TestFail is SystemConfig_Init {
...@@ -318,8 +365,6 @@ contract SystemConfig_Setters_TestFail is SystemConfig_Init { ...@@ -318,8 +365,6 @@ contract SystemConfig_Setters_TestFail is SystemConfig_Init {
} }
contract SystemConfig_Setters_Test is SystemConfig_Init { contract SystemConfig_Setters_Test is SystemConfig_Init {
event ConfigUpdate(uint256 indexed version, SystemConfig.UpdateType indexed updateType, bytes data);
/// @dev Tests that `setBatcherHash` updates the batcher hash successfully. /// @dev Tests that `setBatcherHash` updates the batcher hash successfully.
function testFuzz_setBatcherHash_succeeds(bytes32 newBatcherHash) external { function testFuzz_setBatcherHash_succeeds(bytes32 newBatcherHash) external {
vm.expectEmit(true, true, true, true); vm.expectEmit(true, true, true, true);
......
...@@ -43,13 +43,13 @@ ...@@ -43,13 +43,13 @@
"@ethersproject/keccak256": "^5.7.0", "@ethersproject/keccak256": "^5.7.0",
"@ethersproject/properties": "^5.7.0", "@ethersproject/properties": "^5.7.0",
"@ethersproject/rlp": "^5.7.0", "@ethersproject/rlp": "^5.7.0",
"@ethersproject/web": "^5.7.0", "@ethersproject/web": "^5.7.1",
"chai": "^4.3.4", "chai": "^4.3.7",
"ethers": "^5.7.0", "ethers": "^5.7.2",
"node-fetch": "^2.6.7" "node-fetch": "^2.6.7"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^20.5.0", "@types/node": "^20.5.0",
"mocha": "^10.0.0" "mocha": "^10.2.0"
} }
} }
...@@ -70,7 +70,7 @@ export class Etherscan { ...@@ -70,7 +70,7 @@ export class Etherscan {
url.searchParams.append('apikey', this.apiKey) url.searchParams.append('apikey', this.apiKey)
const response = await fetch(url) const response = await fetch(url)
const result = await response.json() const result = await response.json()
return result.result[0] return (result as { result: number[] }).result[0]
} }
public async getContractABI(address: string): Promise<any> { public async getContractABI(address: string): Promise<any> {
......
...@@ -11,5 +11,7 @@ ignores: [ ...@@ -11,5 +11,7 @@ ignores: [
"eslint-plugin-prettier", "eslint-plugin-prettier",
"chai", "chai",
"ts-node", "ts-node",
"typedoc" "typedoc",
"ethereum-waffle",
"nyc"
] ]
artifacts
cache
typechain
.deps
.envrc
.env
/dist/
module.exports = {
...require('../../.prettierrc.js'),
}
MIT License
Copyright (c) 2023 Optimism
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
{
"name": "@eth-optimism/web3.js-plugin",
"version": "0.1.0",
"description": "A Web3.js plugin for doing OP-Chain gas estimation",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/ethereum-optimism/optimism.git",
"directory": "packages/web3js-plugin"
},
"homepage": "https://optimism.io",
"type": "module",
"exports": {
".": {
"import": "./dist/plugin.js",
"require": "./dist/plugin.cjs",
"default": "./dist/plugin.js",
"types": "./src/plugin.d.ts"
}
},
"types": "dist/plugin.d.ts",
"files": [
"dist/",
"src/"
],
"scripts": {
"build": "tsup",
"lint": "prettier --check .",
"lint:fix": "prettier --write .",
"test": "vitest --coverage",
"test:coverage": "vitest run --coverage",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@eth-optimism/contracts-ts": "workspace:^",
"@swc/core": "^1.3.76",
"@vitest/coverage-istanbul": "^0.34.1",
"tsup": "^7.2.0",
"typescript": "^5.1.6",
"viem": "^1.6.0",
"vite": "^4.4.9",
"vitest": "^0.34.1",
"zod": "^3.22.0"
},
"dependencies": {
"@ethereumjs/rlp": "^5.0.0",
"web3-eth": "^4.0.3",
"web3-eth-accounts": "^4.0.3"
},
"peerDependencies": {
"web3": ">= 4.0.3 < 5.x"
}
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"baseUrl": "./src",
"noEmit": true,
"target": "ESNext",
"lib": ["esnext"],
"module": "esnext",
"moduleResolution": "Node",
"isolatedModules": true,
"allowUnreachableCode": false,
"skipLibCheck": false,
"allowUnusedLabels": false,
"alwaysStrict": true,
"exactOptionalPropertyTypes": true,
"noFallthroughCasesInSwitch": true,
"noImplicitAny": true,
"noImplicitReturns": true,
"noImplicitOverride": true,
"noImplicitThis": true,
"forceConsistentCasingInFileNames": true,
"verbatimModuleSyntax": true,
"noPropertyAccessFromIndexSignature": true,
"noUncheckedIndexedAccess": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"strict": true
},
"include": ["./src"]
}
import { defineConfig } from 'tsup'
import packageJson from './package.json'
// @see https://tsup.egoist.dev/
export default defineConfig({
name: packageJson.name,
entry: ['src/plugin.ts'],
outDir: 'dist',
format: ['esm', 'cjs'],
splitting: false,
sourcemap: true,
clean: false,
dts: true
})
import { defineConfig } from 'vitest/config'
// @see https://vitest.dev/config/
export default defineConfig({
test: {
environment: 'jsdom',
coverage: {
provider: 'istanbul',
},
},
})