Commit 7be5a264 authored by mergify[bot], committed by GitHub

Merge branch 'develop' into felipe/ufm-ci

parents 8620c42c bbd27dd4
......@@ -3,7 +3,7 @@
"changelog": ["@changesets/changelog-github", { "repo": "ethereum-optimism/optimism" }],
"commit": false,
"fixed": [],
"linked": [],
"linked": [["contracts-bedrock", "contracts-ts"]],
"access": "public",
"baseBranch": "develop",
"updateInternalDependencies": "patch",
......
---
'@eth-optimism/sdk': minor
---
Added from-block and to-block filters to several methods in `CrossChainMessenger`
......@@ -143,6 +143,7 @@ jobs:
- "packages/fault-detector/node_modules"
- "packages/replica-healthcheck/node_modules"
- "packages/sdk/node_modules"
- "packages/contracts-ts/node_modules"
- run:
name: print forge version
command: forge --version
......@@ -196,7 +197,7 @@ jobs:
fi
IMAGE_BASE="<<parameters.registry>>/<<parameters.repo>>/<<parameters.docker_name>>"
DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|-t ${IMAGE_BASE}:|")
docker build \
docker build --progress plain \
$(echo -ne $DOCKER_TAGS | tr '\n' ' ') \
-f <<parameters.docker_file>> \
<<parameters.docker_context>>
......@@ -265,7 +266,7 @@ jobs:
DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|-t ${IMAGE_BASE}:|")
docker context create buildx-build
docker buildx create --use buildx-build
docker buildx build --platform=<<parameters.platforms>> --target "<<parameters.docker_target>>" --push \
docker buildx build --progress plain --platform=<<parameters.platforms>> --target "<<parameters.docker_target>>" --push \
$(echo -ne $DOCKER_TAGS | tr '\n' ' ') \
-f <<parameters.docker_file>> \
<<parameters.docker_context>>
......@@ -320,7 +321,7 @@ jobs:
DOCKER_TAGS=$(echo -ne <<parameters.docker_tags>> | sed "s/,/\n/g" | sed "s/[^a-zA-Z0-9\n]/-/g" | sed -e "s|^|-t ${IMAGE_BASE}:|")
docker context create buildx-build
docker buildx create --use buildx-build
docker buildx build --platform=<<parameters.platforms>> --target "<<parameters.docker_target>>" --push \
docker buildx build --progress plain --platform=<<parameters.platforms>> --target "<<parameters.docker_target>>" --push \
$(echo -ne $DOCKER_TAGS | tr '\n' ' ') \
-f <<parameters.docker_file>> \
<<parameters.docker_context>>
......@@ -709,6 +710,24 @@ jobs:
name: Upload coverage
command: codecov --verbose --clean --flags <<parameters.coverage_flag>>
contracts-ts-tests:
docker:
- image: us-docker.pkg.dev/oplabs-tools-artifacts/images/ci-builder:latest
resource_class: large
steps:
- checkout
- attach_workspace: { at: "." }
- restore_cache:
name: Restore pnpm Package Cache
keys:
- pnpm-packages-v2-{{ checksum "pnpm-lock.yaml" }}
- check-changed:
patterns: sdk,contracts-bedrock,contracts
- run:
name: Check generated and build
command: pnpm generate:check
working_directory: packages/contracts-ts
sdk-next-tests:
docker:
- image: us-docker.pkg.dev/oplabs-tools-artifacts/images/ci-builder:latest
......@@ -1336,6 +1355,13 @@ workflows:
dependencies: "(common-ts|core-utils|sdk)"
requires:
- pnpm-monorepo
- js-lint-test:
name: contracts-ts-tests
coverage_flag: contracts-ts-tests
package_name: contracts-ts
dependencies: '(contracts-bedrock|contracts-ts)'
requires:
- pnpm-monorepo
- js-lint-test:
name: sdk-next-tests
coverage_flag: sdk-next-tests
......@@ -1374,7 +1400,6 @@ workflows:
name: indexer-tests
binary_name: indexer
working_directory: indexer
dependencies: op-bindings
- go-lint-test-build:
name: op-heartbeat tests
binary_name: op-heartbeat
......
......@@ -3,21 +3,27 @@ module github.com/ethereum-optimism/optimism
go 1.19
require (
github.com/BurntSushi/toml v1.3.2
github.com/btcsuite/btcd v0.23.3
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0
github.com/ethereum-optimism/go-ethereum-hdwallet v0.1.3
github.com/ethereum/go-ethereum v1.11.6
github.com/fsnotify/fsnotify v1.6.0
github.com/go-chi/chi/v5 v5.0.0
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb
github.com/google/go-cmp v0.5.9
github.com/google/gofuzz v1.2.1-0.20220503160820-4a35382e8fc8
github.com/google/uuid v1.3.0
github.com/gorilla/mux v1.8.0
github.com/hashicorp/go-multierror v1.1.1
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d
github.com/hashicorp/golang-lru/v2 v2.0.1
github.com/holiman/uint256 v1.2.2-0.20230321075855-87b91420868c
github.com/ipfs/go-datastore v0.6.0
github.com/ipfs/go-ds-leveldb v0.5.0
github.com/jackc/pgtype v1.14.0
github.com/lib/pq v1.10.9
github.com/libp2p/go-libp2p v0.25.1
github.com/libp2p/go-libp2p-pubsub v0.9.3
github.com/libp2p/go-libp2p-testing v0.12.0
......@@ -29,13 +35,17 @@ require (
github.com/pkg/errors v0.9.1
github.com/pkg/profile v1.7.0
github.com/prometheus/client_golang v1.14.0
github.com/rs/cors v1.8.2
github.com/stretchr/testify v1.8.1
github.com/urfave/cli v1.22.2
github.com/urfave/cli/v2 v2.25.7
golang.org/x/crypto v0.6.0
golang.org/x/crypto v0.8.0
golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
golang.org/x/sync v0.1.0
golang.org/x/term v0.6.0
golang.org/x/term v0.7.0
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af
gorm.io/driver/postgres v1.5.2
gorm.io/gorm v1.25.2
)
require (
......@@ -81,7 +91,6 @@ require (
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/gopacket v1.1.19 // indirect
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/graph-gophers/graphql-go v1.3.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
......@@ -93,9 +102,15 @@ require (
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097 // indirect
github.com/ipfs/go-cid v0.3.2 // indirect
github.com/ipfs/go-log/v2 v2.5.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.3.1 // indirect
github.com/jackpal/go-nat-pmp v1.0.2 // indirect
github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect
github.com/jbenet/goprocess v0.1.4 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/klauspost/compress v1.15.15 // indirect
github.com/klauspost/cpuid/v2 v2.2.3 // indirect
github.com/koron/go-ssdp v0.0.3 // indirect
......@@ -147,7 +162,6 @@ require (
github.com/raulk/go-watchdog v1.3.0 // indirect
github.com/rivo/uniseg v0.4.3 // indirect
github.com/rogpeppe/go-internal v1.9.0 // indirect
github.com/rs/cors v1.8.2 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
......@@ -165,9 +179,9 @@ require (
go.uber.org/multierr v1.9.0 // indirect
go.uber.org/zap v1.24.0 // indirect
golang.org/x/mod v0.9.0 // indirect
golang.org/x/net v0.8.0 // indirect
golang.org/x/sys v0.6.0 // indirect
golang.org/x/text v0.8.0 // indirect
golang.org/x/net v0.9.0 // indirect
golang.org/x/sys v0.7.0 // indirect
golang.org/x/text v0.9.0 // indirect
golang.org/x/tools v0.7.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
......
FROM golang:1.19.0-alpine3.15 as builder
FROM --platform=$BUILDPLATFORM golang:1.19.9-alpine3.16 as builder
RUN apk add --no-cache make gcc musl-dev linux-headers git jq bash
COPY ./indexer /go/indexer
COPY ./.git /go/.git
COPY ./indexer/go.mod /go/indexer/go.mod
COPY ./indexer/go.sum /go/indexer/go.sum
# build indexer with the shared go.mod & go.sum files
COPY ./indexer /app/indexer
COPY ./op-bindings /app/op-bindings
COPY ./op-service /app/op-service
COPY ./op-node /app/op-node
COPY ./go.mod /app/go.mod
COPY ./go.sum /app/go.sum
COPY ./.git /app/.git
WORKDIR /go/indexer
RUN make
WORKDIR /app/indexer
FROM alpine:3.15
RUN go mod download
COPY --from=builder /go/indexer/indexer /usr/local/bin
RUN make indexer
FROM alpine:3.16
COPY --from=builder /app/indexer/indexer /usr/local/bin
CMD ["indexer"]
GITCOMMIT := $(shell git rev-parse HEAD)
GITDATE := $(shell git show -s --format='%ct')
GITVERSION := $(shell cat package.json | jq .version)
LDFLAGSSTRING +=-X main.GitCommit=$(GITCOMMIT)
LDFLAGSSTRING +=-X main.GitDate=$(GITDATE)
LDFLAGSSTRING +=-X main.GitVersion=$(GITVERSION)
LDFLAGS := -ldflags "$(LDFLAGSSTRING)"
indexer:
......
module github.com/ethereum-optimism/optimism/indexer
go 1.19
replace github.com/ethereum/go-ethereum v1.11.6 => github.com/ethereum-optimism/op-geth v1.101106.0-rc.2
require (
github.com/BurntSushi/toml v1.3.0
github.com/ethereum-optimism/optimism v1.0.9
github.com/ethereum/go-ethereum v1.11.6
github.com/go-chi/chi/v5 v5.0.8
github.com/google/uuid v1.3.0
github.com/gorilla/mux v1.8.0
github.com/jackc/pgtype v1.14.0
github.com/lib/pq v1.10.4
github.com/prometheus/client_golang v1.14.0
github.com/rs/cors v1.8.2
github.com/stretchr/testify v1.8.1
github.com/urfave/cli v1.22.9
github.com/urfave/cli/v2 v2.17.2-0.20221006022127-8f469abc00aa
gorm.io/driver/postgres v1.5.2
gorm.io/gorm v1.25.1
)
require (
github.com/DataDog/zstd v1.5.2 // indirect
github.com/VictoriaMetrics/fastcache v1.10.0 // indirect
github.com/benbjohnson/clock v1.3.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/btcsuite/btcd v0.23.3 // indirect
github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect
github.com/btcsuite/btcd/btcutil v1.1.0 // indirect
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 // indirect
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cockroachdb/errors v1.9.1 // indirect
github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect
github.com/cockroachdb/pebble v0.0.0-20230209160836-829675f94811 // indirect
github.com/cockroachdb/redact v1.1.3 // indirect
github.com/containerd/cgroups v1.1.0 // indirect
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c // indirect
github.com/deckarep/golang-set/v2 v2.1.0 // indirect
github.com/decred/dcrd/crypto/blake256 v1.0.0 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
github.com/deepmap/oapi-codegen v1.8.2 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/edsrzf/mmap-go v1.1.0 // indirect
github.com/elastic/gosigar v0.14.2 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/ethereum-optimism/go-ethereum-hdwallet v0.1.3 // indirect
github.com/fjl/memsize v0.0.1 // indirect
github.com/flynn/noise v1.0.0 // indirect
github.com/francoispqt/gojay v1.2.13 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/gballet/go-libpcsclite v0.0.0-20191108122812-4678299bea08 // indirect
github.com/getsentry/sentry-go v0.18.0 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 // indirect
github.com/godbus/dbus/v5 v5.1.0 // indirect
github.com/gofrs/flock v0.8.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.4.2 // indirect
github.com/golang/mock v1.6.0 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
github.com/google/gopacket v1.1.19 // indirect
github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/graph-gophers/graphql-go v1.3.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-bexpr v0.1.11 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect
github.com/hashicorp/golang-lru/v2 v2.0.1 // indirect
github.com/holiman/bloomfilter/v2 v2.0.3 // indirect
github.com/holiman/uint256 v1.2.2-0.20230321075855-87b91420868c // indirect
github.com/huin/goupnp v1.1.0 // indirect
github.com/influxdata/influxdb-client-go/v2 v2.4.0 // indirect
github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c // indirect
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097 // indirect
github.com/ipfs/go-cid v0.3.2 // indirect
github.com/ipfs/go-datastore v0.6.0 // indirect
github.com/ipfs/go-log v1.0.5 // indirect
github.com/ipfs/go-log/v2 v2.5.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/pgx/v5 v5.3.1 // indirect
github.com/jackpal/go-nat-pmp v1.0.2 // indirect
github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect
github.com/jbenet/goprocess v0.1.4 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/klauspost/compress v1.15.15 // indirect
github.com/klauspost/cpuid/v2 v2.2.3 // indirect
github.com/koron/go-ssdp v0.0.3 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/libp2p/go-buffer-pool v0.1.0 // indirect
github.com/libp2p/go-cidranger v1.1.0 // indirect
github.com/libp2p/go-flow-metrics v0.1.0 // indirect
github.com/libp2p/go-libp2p v0.25.1 // indirect
github.com/libp2p/go-libp2p-asn-util v0.2.0 // indirect
github.com/libp2p/go-libp2p-pubsub v0.9.0 // indirect
github.com/libp2p/go-libp2p-testing v0.12.0 // indirect
github.com/libp2p/go-mplex v0.7.0 // indirect
github.com/libp2p/go-msgio v0.3.0 // indirect
github.com/libp2p/go-nat v0.1.0 // indirect
github.com/libp2p/go-netroute v0.2.1 // indirect
github.com/libp2p/go-reuseport v0.2.0 // indirect
github.com/libp2p/go-yamux/v4 v4.0.0 // indirect
github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.17 // indirect
github.com/mattn/go-runewidth v0.0.14 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/miekg/dns v1.1.50 // indirect
github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect
github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/pointerstructure v1.2.1 // indirect
github.com/mr-tron/base58 v1.2.0 // indirect
github.com/multiformats/go-base32 v0.1.0 // indirect
github.com/multiformats/go-base36 v0.2.0 // indirect
github.com/multiformats/go-multiaddr v0.8.0 // indirect
github.com/multiformats/go-multiaddr-dns v0.3.1 // indirect
github.com/multiformats/go-multiaddr-fmt v0.1.0 // indirect
github.com/multiformats/go-multibase v0.1.1 // indirect
github.com/multiformats/go-multicodec v0.8.1 // indirect
github.com/multiformats/go-multihash v0.2.1 // indirect
github.com/multiformats/go-multistream v0.4.1 // indirect
github.com/multiformats/go-varint v0.0.7 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/onsi/ginkgo/v2 v2.8.1 // indirect
github.com/opencontainers/runtime-spec v1.0.2 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.3.0 // indirect
github.com/prometheus/common v0.39.0 // indirect
github.com/prometheus/procfs v0.9.0 // indirect
github.com/quic-go/qpack v0.4.0 // indirect
github.com/quic-go/qtls-go1-18 v0.2.0 // indirect
github.com/quic-go/qtls-go1-19 v0.2.0 // indirect
github.com/quic-go/qtls-go1-20 v0.1.0 // indirect
github.com/quic-go/quic-go v0.32.0 // indirect
github.com/quic-go/webtransport-go v0.5.1 // indirect
github.com/raulk/go-watchdog v1.3.0 // indirect
github.com/rivo/uniseg v0.4.3 // indirect
github.com/rogpeppe/go-internal v1.9.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/status-im/keycard-go v0.2.0 // indirect
github.com/stretchr/objx v0.5.0 // indirect
github.com/syndtr/goleveldb v1.0.1-0.20220614013038-64ee5596c38a // indirect
github.com/tklauser/go-sysconf v0.3.10 // indirect
github.com/tklauser/numcpus v0.5.0 // indirect
github.com/tyler-smith/go-bip39 v1.1.0 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect
go.uber.org/atomic v1.10.0 // indirect
go.uber.org/dig v1.16.1 // indirect
go.uber.org/fx v1.19.1 // indirect
go.uber.org/multierr v1.9.0 // indirect
go.uber.org/zap v1.24.0 // indirect
golang.org/x/crypto v0.8.0 // indirect
golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb // indirect
golang.org/x/mod v0.9.0 // indirect
golang.org/x/net v0.9.0 // indirect
golang.org/x/sync v0.1.0 // indirect
golang.org/x/sys v0.7.0 // indirect
golang.org/x/term v0.7.0 // indirect
golang.org/x/text v0.9.0 // indirect
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af // indirect
golang.org/x/tools v0.7.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
lukechampine.com/blake3 v1.1.7 // indirect
nhooyr.io/websocket v1.8.7 // indirect
)
package integration_tests
import (
"context"
"database/sql"
"encoding/json"
"fmt"
......@@ -14,10 +13,7 @@ import (
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/ethclient/gethclient"
"github.com/ethereum/go-ethereum/params"
"github.com/ethereum/go-ethereum/rpc"
"github.com/stretchr/testify/require"
"github.com/ethereum-optimism/optimism/indexer/db"
......@@ -29,6 +25,7 @@ import (
"github.com/ethereum-optimism/optimism/op-e2e/e2eutils"
"github.com/ethereum-optimism/optimism/op-node/rollup/derive"
"github.com/ethereum-optimism/optimism/op-node/withdrawals"
"github.com/ethereum-optimism/optimism/op-service/client/utils"
_ "github.com/lib/pq"
)
......@@ -47,14 +44,12 @@ func TestBedrockIndexer(t *testing.T) {
fromAddr := cfg.Secrets.Addresses().Alice
// wait a couple of blocks
require.NoError(t, e2eutils.WaitBlock(e2eutils.TimeoutCtx(t, 30*time.Second), l2Client, 10))
require.NoError(t, utils.WaitBlock(e2eutils.TimeoutCtx(t, 30*time.Second), l2Client, 10))
l1SB, err := bindings.NewL1StandardBridge(predeploys.DevL1StandardBridgeAddr, l1Client)
require.NoError(t, err)
l2SB, err := bindings.NewL2StandardBridge(predeploys.L2StandardBridgeAddr, l2Client)
require.NoError(t, err)
portal, err := bindings.NewOptimismPortal(predeploys.DevOptimismPortalAddr, l1Client)
require.NoError(t, err)
l1Opts, err := bind.NewKeyedTransactorWithChainID(cfg.Secrets.Alice, cfg.L1ChainIDBig())
require.NoError(t, err)
l2Opts, err := bind.NewKeyedTransactorWithChainID(cfg.Secrets.Alice, cfg.L2ChainIDBig())
......@@ -104,7 +99,7 @@ func TestBedrockIndexer(t *testing.T) {
l1Opts.Value = big.NewInt(params.Ether)
depTx, err := l1SB.DepositETH(l1Opts, 200_000, nil)
require.NoError(t, err)
depReceipt, err := e2eutils.WaitReceiptOK(e2eutils.TimeoutCtx(t, 10*time.Second), l1Client, depTx.Hash())
depReceipt, err := utils.WaitReceiptOK(e2eutils.TimeoutCtx(t, 10*time.Second), l1Client, depTx.Hash())
require.NoError(t, err)
require.Greaterf(t, len(depReceipt.Logs), 0, "must have logs")
var l2Hash common.Hash
......@@ -119,12 +114,12 @@ func TestBedrockIndexer(t *testing.T) {
l2Hash = tx.Hash()
}
require.NotEqual(t, common.Hash{}, l2Hash)
_, err = e2eutils.WaitReceiptOK(e2eutils.TimeoutCtx(t, 15*time.Second), l2Client, l2Hash)
_, err = utils.WaitReceiptOK(e2eutils.TimeoutCtx(t, 15*time.Second), l2Client, l2Hash)
require.NoError(t, err)
// Poll for indexer deposit
var depPage *db.PaginatedDeposits
require.NoError(t, e2eutils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
require.NoError(t, utils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
res := new(db.PaginatedDeposits)
err := getJSON(makeURL(fmt.Sprintf("v1/deposits/%s", fromAddr)), res)
if err != nil {
......@@ -155,11 +150,11 @@ func TestBedrockIndexer(t *testing.T) {
l2Opts.Value = big.NewInt(0.5 * params.Ether)
wdTx, err := l2SB.Withdraw(l2Opts, predeploys.LegacyERC20ETHAddr, big.NewInt(0.5*params.Ether), 0, nil)
require.NoError(t, err)
wdReceipt, err := e2eutils.WaitReceiptOK(e2eutils.TimeoutCtx(t, 30*time.Second), l2Client, wdTx.Hash())
wdReceipt, err := utils.WaitReceiptOK(e2eutils.TimeoutCtx(t, 30*time.Second), l2Client, wdTx.Hash())
require.NoError(t, err)
var wdPage *db.PaginatedWithdrawals
require.NoError(t, e2eutils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
require.NoError(t, utils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
res := new(db.PaginatedWithdrawals)
err := getJSON(makeURL(fmt.Sprintf("v1/withdrawals/%s", fromAddr)), res)
if err != nil {
......@@ -189,50 +184,11 @@ func TestBedrockIndexer(t *testing.T) {
require.Equal(t, db.ETHL2Token, withdrawal.L2Token)
require.NotEmpty(t, withdrawal.GUID)
finBlockNum, err := withdrawals.WaitForFinalizationPeriod(
e2eutils.TimeoutCtx(t, time.Minute),
l1Client,
predeploys.DevOptimismPortalAddr,
wdReceipt.BlockNumber,
)
require.NoError(t, err)
finHeader, err := l2Client.HeaderByNumber(context.Background(), big.NewInt(int64(finBlockNum)))
require.NoError(t, err)
rpcClient, err := rpc.Dial(sys.Nodes["sequencer"].HTTPEndpoint())
require.NoError(t, err)
proofCl := gethclient.New(rpcClient)
receiptCl := ethclient.NewClient(rpcClient)
oracle, err := bindings.NewL2OutputOracleCaller(predeploys.DevL2OutputOracleAddr, l1Client)
require.Nil(t, err)
wParams, err := withdrawals.ProveWithdrawalParameters(context.Background(), proofCl, receiptCl, wdTx.Hash(), finHeader, oracle)
require.NoError(t, err)
l1Opts.Value = big.NewInt(0)
withdrawalTx := bindings.TypesWithdrawalTransaction{
Nonce: wParams.Nonce,
Sender: wParams.Sender,
Target: wParams.Target,
Value: wParams.Value,
GasLimit: wParams.GasLimit,
Data: wParams.Data,
}
// Prove our withdrawal
proveTx, err := portal.ProveWithdrawalTransaction(
l1Opts,
withdrawalTx,
wParams.L2OutputIndex,
wParams.OutputRootProof,
wParams.WithdrawalProof,
)
require.NoError(t, err)
proveReceipt, err := e2eutils.WaitReceiptOK(e2eutils.TimeoutCtx(t, time.Minute), l1Client, proveTx.Hash())
require.NoError(t, err)
wdParams, proveReceipt := op_e2e.ProveWithdrawal(t, cfg, l1Client, sys.Nodes["sequencer"], cfg.Secrets.Alice, wdReceipt)
wdPage = nil
require.NoError(t, e2eutils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
require.NoError(t, utils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
res := new(db.PaginatedWithdrawals)
err := getJSON(makeURL(fmt.Sprintf("v1/withdrawals/%s", fromAddr)), res)
if err != nil {
......@@ -251,27 +207,13 @@ func TestBedrockIndexer(t *testing.T) {
require.Equal(t, proveReceipt.TxHash.String(), *wd.BedrockProvenTxHash)
require.Nil(t, wd.BedrockFinalizedTxHash)
// Wait for the finalization period to elapse
_, err = withdrawals.WaitForFinalizationPeriod(
e2eutils.TimeoutCtx(t, time.Minute),
l1Client,
predeploys.DevOptimismPortalAddr,
finHeader.Number,
)
require.NoError(t, err)
// Send our finalize withdrawal transaction
finTx, err := portal.FinalizeWithdrawalTransaction(
l1Opts,
withdrawalTx,
)
require.NoError(t, err)
finReceipt, err := e2eutils.WaitReceiptOK(e2eutils.TimeoutCtx(t, time.Minute), l1Client, finTx.Hash())
require.NoError(t, err)
// Finalize withdrawal
err = withdrawals.WaitForFinalizationPeriod(e2eutils.TimeoutCtx(t, 30*time.Second), l1Client, predeploys.DevOptimismPortalAddr, proveReceipt.BlockNumber)
require.Nil(t, err)
finReceipt := op_e2e.FinalizeWithdrawal(t, cfg, l1Client, cfg.Secrets.Alice, wdReceipt, wdParams)
wdPage = nil
require.NoError(t, e2eutils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
require.NoError(t, utils.WaitFor(e2eutils.TimeoutCtx(t, 30*time.Second), 100*time.Millisecond, func() (bool, error) {
res := new(db.PaginatedWithdrawals)
err := getJSON(makeURL(fmt.Sprintf("v1/withdrawals/%s", fromAddr)), res)
if err != nil {
......
......@@ -56,6 +56,9 @@
"inputs": ["default", "testing", "^production"],
"dependsOn": ["^build"]
},
"generate": {
"dependsOn": ["^build"]
},
"build:contracts": {
"inputs": [
"configsProject",
......
......@@ -28,3 +28,7 @@ $ make devtools
```
The geth docs for `abigen` can be found [here](https://geth.ethereum.org/docs/dapp/native-bindings).
## See also
TypeScript bindings are also generated in [@eth-optimism/contracts-ts](../packages/contracts-ts/).
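As an illustrative sketch only (not part of this diff), this is roughly how the abigen-generated Go bindings are consumed. The RPC URL and game address are placeholders, and the `FaultDisputeGameCaller` usage mirrors the challenger code added elsewhere in this commit:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ethereum-optimism/optimism/op-bindings/bindings"
	"github.com/ethereum/go-ethereum/accounts/abi/bind"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/ethclient"
)

func main() {
	// Placeholder RPC URL and contract address, for illustration only.
	client, err := ethclient.Dial("http://localhost:8545")
	if err != nil {
		log.Fatal(err)
	}
	gameAddr := common.HexToAddress("0x8daf17a20c9dba35f005b6324f493785d239719d")

	// Bind the abigen-generated read-only caller for the FaultDisputeGame contract.
	caller, err := bindings.NewFaultDisputeGameCaller(gameAddr, client)
	if err != nil {
		log.Fatal(err)
	}

	// Call a view function through the generated binding.
	status, err := caller.Status(&bind.CallOpts{Context: context.Background()})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("game status:", status)
}
```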
......@@ -2,6 +2,9 @@ package predeploys
import "github.com/ethereum/go-ethereum/common"
// TODO - we should get a single TOML, YAML, or JSON file as the source of truth in the @eth-optimism/bedrock package
// This list needs to be kept in sync with @eth-optimism/contracts-ts/wagmi.config.ts, which also specifies these addresses
// To improve robustness and maintainability, contracts-bedrock should export all addresses
const (
L2ToL1MessagePasser = "0x4200000000000000000000000000000000000016"
DeployerWhitelist = "0x4200000000000000000000000000000000000002"
......
package predeploys
import "github.com/ethereum/go-ethereum/common"
const (
LegacyERC20ETH = "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
)
var (
LegacyERC20ETHAddr = common.HexToAddress(LegacyERC20ETH)
)
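A minimal sketch (not part of this diff, assuming the standard layout of the predeploys package) of how Go code consumes these constants. The `...Addr` variables follow the `LegacyERC20ETHAddr` pattern above, and both names used here appear in the indexer integration test changed in this commit; the companion TypeScript list lives in `wagmi.config.ts`, as the comment above notes:

```go
package main

import (
	"fmt"

	"github.com/ethereum-optimism/optimism/op-bindings/predeploys"
)

func main() {
	// Each predeploy is exported as a hex string constant plus a parsed
	// common.Address variable, e.g. LegacyERC20ETH / LegacyERC20ETHAddr.
	fmt.Println("LegacyERC20ETH:", predeploys.LegacyERC20ETHAddr.Hex())
	// Dev deployment addresses such as DevOptimismPortalAddr follow the
	// same pattern and are referenced by the indexer integration test.
	fmt.Println("DevOptimismPortal:", predeploys.DevOptimismPortalAddr.Hex())
}
```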
......@@ -20,35 +20,3 @@ to see a list of available options.
`op-challenger` is configurable via command line flags and environment variables. The help menu
shows the available config options and can be accessed by running `./op-challenger --help`.
Note that there are many global options, but the most important ones are:
- `OP_CHALLENGER_L1_ETH_RPC`: An L1 Ethereum RPC URL
- `OP_CHALLENGER_ROLLUP_RPC`: A Rollup Node RPC URL
- `OP_CHALLENGER_L2OO_ADDRESS`: The L2OutputOracle Contract Address
- `OP_CHALLENGER_DGF_ADDRESS`: Dispute Game Factory Contract Address
Here is a reduced output from running `./op-challenger --help`:
```bash
NAME:
op-challenger - Modular Challenger Agent
USAGE:
main [global options] command [command options] [arguments...]
VERSION:
1.0.0
DESCRIPTION:
A modular op-stack challenge agent for output dispute games written in golang.
COMMANDS:
help, h Shows a list of commands or help for one command
GLOBAL OPTIONS:
--l1-eth-rpc value HTTP provider URL for L1. [$OP_CHALLENGER_L1_ETH_RPC]
--rollup-rpc value HTTP provider URL for the rollup node. [$OP_CHALLENGER_ROLLUP_RPC]
--l2oo-address value Address of the L2OutputOracle contract. [$OP_CHALLENGER_L2OO_ADDRESS]
--dgf-address value Address of the DisputeGameFactory contract. [$OP_CHALLENGER_DGF_ADDRESS]
...
--help, -h show help
--version, -v print the version
```
......@@ -35,16 +35,21 @@ func Main(logger log.Logger, cfg *config.Config) error {
if err != nil {
return fmt.Errorf("failed to create the responder: %w", err)
}
gameDepth := 4
trace := fault.NewAlphabetProvider(cfg.AlphabetTrace, uint64(gameDepth))
trace := fault.NewAlphabetProvider(cfg.AlphabetTrace, uint64(cfg.GameDepth))
agent := fault.NewAgent(loader, gameDepth, trace, responder, cfg.AgreeWithProposedOutput, logger)
agent := fault.NewAgent(loader, cfg.GameDepth, trace, responder, cfg.AgreeWithProposedOutput, logger)
caller, err := fault.NewFaultCallerFromBindings(cfg.GameAddress, client, logger)
if err != nil {
return fmt.Errorf("failed to bind the fault contract: %w", err)
}
logger.Info("Fault game started")
for {
logger.Info("Performing action")
_ = agent.Act()
caller.LogGameInfo()
time.Sleep(300 * time.Millisecond)
}
}
......@@ -14,4 +14,11 @@ MALLORY_KEY="28d7045146193f5f4eeb151c4843544b1b0d30a7ac1680c845a416fac65a7715"
FAULT_GAME_ADDRESS="0x8daf17a20c9dba35f005b6324f493785d239719d"
./bin/op-challenger --l1-eth-rpc http://localhost:8545 --alphabet "abcdefgh" --game-address $FAULT_GAME_ADDRESS --private-key $CHARLIE_KEY --num-confirmations 1 --agree-with-proposed-output=true
./bin/op-challenger \
--l1-eth-rpc http://localhost:8545 \
--alphabet "abcdefgh" \
--game-address $FAULT_GAME_ADDRESS \
--private-key $CHARLIE_KEY \
--num-confirmations 1 \
--game-depth 4 \
--agree-with-proposed-output=true
......@@ -16,6 +16,7 @@ var (
gameAddressValue = "0xaa00000000000000000000000000000000000000"
alphabetTrace = "abcdefghijz"
agreeWithProposedOutput = "true"
gameDepth = "4"
)
func TestLogLevel(t *testing.T) {
......@@ -35,12 +36,12 @@ func TestLogLevel(t *testing.T) {
func TestDefaultCLIOptionsMatchDefaultConfig(t *testing.T) {
cfg := configForArgs(t, addRequiredArgs())
defaultCfg := config.NewConfig(l1EthRpc, common.HexToAddress(gameAddressValue), alphabetTrace, true)
defaultCfg := config.NewConfig(l1EthRpc, common.HexToAddress(gameAddressValue), alphabetTrace, true, 4)
require.Equal(t, defaultCfg, cfg)
}
func TestDefaultConfigIsValid(t *testing.T) {
cfg := config.NewConfig(l1EthRpc, common.HexToAddress(gameAddressValue), alphabetTrace, true)
cfg := config.NewConfig(l1EthRpc, common.HexToAddress(gameAddressValue), alphabetTrace, true, 4)
require.NoError(t, cfg.Check())
}
......@@ -109,6 +110,18 @@ func TestAgreeWithProposedOutput(t *testing.T) {
})
}
func TestGameDepth(t *testing.T) {
t.Run("Required", func(t *testing.T) {
verifyArgsInvalid(t, "flag game-depth is required", addRequiredArgsExcept("--game-depth"))
})
t.Run("Valid", func(t *testing.T) {
value := "4"
cfg := configForArgs(t, addRequiredArgsExcept("--game-depth", "--game-depth="+value))
require.Equal(t, value, fmt.Sprint(cfg.GameDepth))
})
}
func verifyArgsInvalid(t *testing.T, messageContains string, cliArgs []string) {
_, _, err := runWithArgs(cliArgs)
require.ErrorContains(t, err, messageContains)
......@@ -146,6 +159,7 @@ func addRequiredArgsExcept(name string, optionalArgs ...string) []string {
func requiredArgs() map[string]string {
return map[string]string{
"--game-depth": gameDepth,
"--agree-with-proposed-output": agreeWithProposedOutput,
"--l1-eth-rpc": l1EthRpc,
"--game-address": gameAddressValue,
......
......@@ -25,6 +25,7 @@ type Config struct {
GameAddress common.Address // Address of the fault game
AlphabetTrace string // String for the AlphabetTraceProvider
AgreeWithProposedOutput bool // Temporary config if we agree or disagree with the posted output
GameDepth int // Depth of the game tree
TxMgrConfig txmgr.CLIConfig
}
......@@ -34,6 +35,7 @@ func NewConfig(
GameAddress common.Address,
AlphabetTrace string,
AgreeWithProposedOutput bool,
GameDepth int,
) Config {
return Config{
L1EthRpc: l1EthRpc,
......@@ -41,6 +43,7 @@ func NewConfig(
AlphabetTrace: AlphabetTrace,
TxMgrConfig: txmgr.NewCLIConfig(l1EthRpc),
AgreeWithProposedOutput: AgreeWithProposedOutput,
GameDepth: GameDepth,
}
}
......@@ -78,6 +81,7 @@ func NewConfigFromCLI(ctx *cli.Context) (*Config, error) {
GameAddress: dgfAddress,
AlphabetTrace: ctx.String(flags.AlphabetFlag.Name),
AgreeWithProposedOutput: ctx.Bool(flags.AgreeWithProposedOutputFlag.Name),
GameDepth: ctx.Int(flags.GameDepthFlag.Name),
TxMgrConfig: txMgrConfig,
}, nil
}
......@@ -13,10 +13,11 @@ var (
validGameAddress = common.HexToAddress("0x7bdd3b028C4796eF0EAf07d11394d0d9d8c24139")
validAlphabetTrace = "abcdefgh"
agreeWithProposedOutput = true
gameDepth = 4
)
func validConfig() Config {
cfg := NewConfig(validL1EthRpc, validGameAddress, validAlphabetTrace, agreeWithProposedOutput)
cfg := NewConfig(validL1EthRpc, validGameAddress, validAlphabetTrace, agreeWithProposedOutput, gameDepth)
return cfg
}
......
......@@ -95,7 +95,8 @@ func (a *Agent) step(claim Claim, game Game) error {
return nil
}
if game.AgreeWithClaimLevel(claim) {
agreeWithClaimLevel := game.AgreeWithClaimLevel(claim)
if agreeWithClaimLevel {
a.log.Warn("Agree with leaf claim, skipping step", "claim_depth", claim.Depth(), "maxDepth", a.maxDepth)
return nil
}
......@@ -106,7 +107,7 @@ func (a *Agent) step(claim Claim, game Game) error {
}
a.log.Info("Attempting step", "claim_depth", claim.Depth(), "maxDepth", a.maxDepth)
step, err := a.solver.AttemptStep(claim)
step, err := a.solver.AttemptStep(claim, agreeWithClaimLevel)
if err != nil {
a.log.Warn("Failed to get a step", "err", err)
return err
......
......@@ -49,7 +49,7 @@ func (ap *AlphabetProvider) Get(i uint64) (common.Hash, error) {
func (ap *AlphabetProvider) AbsolutePreState() []byte {
out := make([]byte, 32)
out[31] = 140 // ascii character 140 is "`"
out[31] = 96 // ascii character 96 is "`"
return out
}
......
package fault
import (
"context"
"math/big"
"github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/log"
)
type FaultDisputeGameCaller interface {
Status(opts *bind.CallOpts) (uint8, error)
ClaimDataLen(opts *bind.CallOpts) (*big.Int, error)
}
type FaultCaller struct {
FaultDisputeGameCaller
log log.Logger
fdgAddr common.Address
}
func NewFaultCaller(fdgAddr common.Address, caller FaultDisputeGameCaller, log log.Logger) *FaultCaller {
return &FaultCaller{
caller,
log,
fdgAddr,
}
}
func NewFaultCallerFromBindings(fdgAddr common.Address, client *ethclient.Client, log log.Logger) (*FaultCaller, error) {
caller, err := bindings.NewFaultDisputeGameCaller(fdgAddr, client)
if err != nil {
return nil, err
}
return &FaultCaller{
caller,
log,
fdgAddr,
}, nil
}
// LogGameInfo logs the game info.
func (fc *FaultCaller) LogGameInfo() {
status, err := fc.GetGameStatus(context.Background())
if err != nil {
fc.log.Error("failed to get game status", "err", err)
return
}
claimLen, err := fc.GetClaimDataLength(context.Background())
if err != nil {
fc.log.Error("failed to get claim count", "err", err)
return
}
fc.log.Info("Game info", "addr", fc.fdgAddr, "claims", claimLen, "status", GameStatusString(status))
}
// GetGameStatus returns the current game status.
// 0: In Progress
// 1: Challenger Won
// 2: Defender Won
func (fc *FaultCaller) GetGameStatus(ctx context.Context) (uint8, error) {
return fc.Status(&bind.CallOpts{Context: ctx})
}
func (fc *FaultCaller) LogGameStatus() {
status, err := fc.GetGameStatus(context.Background())
if err != nil {
fc.log.Error("failed to get game status", "err", err)
return
}
fc.log.Info("Game status", "status", GameStatusString(status))
}
// GetClaimDataLength returns the number of claims in the game.
func (fc *FaultCaller) GetClaimDataLength(ctx context.Context) (*big.Int, error) {
return fc.ClaimDataLen(&bind.CallOpts{Context: ctx})
}
func (fc *FaultCaller) LogClaimDataLength() {
claimLen, err := fc.GetClaimDataLength(context.Background())
if err != nil {
fc.log.Error("failed to get claim count", "err", err)
return
}
fc.log.Info("Number of claims", "length", claimLen)
}
// GameStatusString returns the current game status as a string.
func GameStatusString(status uint8) string {
switch status {
case 0:
return "In Progress"
case 1:
return "Challenger Won"
case 2:
return "Defender Won"
default:
return "Unknown"
}
}
package fault
import (
"context"
"errors"
"math/big"
"testing"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/require"
)
var (
testAddr = common.HexToAddress("0x1234567890123456789012345678901234567890")
errMock = errors.New("mock error")
)
type mockFaultDisputeGameCaller struct {
status uint8
errStatus bool
claimDataLen *big.Int
errClaimDataLen bool
}
func (m *mockFaultDisputeGameCaller) Status(opts *bind.CallOpts) (uint8, error) {
if m.errStatus {
return 0, errMock
}
return m.status, nil
}
func (m *mockFaultDisputeGameCaller) ClaimDataLen(opts *bind.CallOpts) (*big.Int, error) {
if m.errClaimDataLen {
return nil, errMock
}
return m.claimDataLen, nil
}
func TestFaultCaller_GetGameStatus(t *testing.T) {
tests := []struct {
name string
caller FaultDisputeGameCaller
expectedStatus uint8
expectedErr error
}{
{
name: "success",
caller: &mockFaultDisputeGameCaller{
status: 1,
},
expectedStatus: 1,
expectedErr: nil,
},
{
name: "error",
caller: &mockFaultDisputeGameCaller{
errStatus: true,
},
expectedStatus: 0,
expectedErr: errMock,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
fc := NewFaultCaller(testAddr, test.caller, nil)
status, err := fc.GetGameStatus(context.Background())
require.Equal(t, test.expectedStatus, status)
require.Equal(t, test.expectedErr, err)
})
}
}
func TestFaultCaller_GetClaimDataLength(t *testing.T) {
tests := []struct {
name string
caller FaultDisputeGameCaller
expectedClaimDataLen *big.Int
expectedErr error
}{
{
name: "success",
caller: &mockFaultDisputeGameCaller{
claimDataLen: big.NewInt(1),
},
expectedClaimDataLen: big.NewInt(1),
expectedErr: nil,
},
{
name: "error",
caller: &mockFaultDisputeGameCaller{
errClaimDataLen: true,
},
expectedClaimDataLen: nil,
expectedErr: errMock,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
fc := NewFaultCaller(testAddr, test.caller, nil)
claimDataLen, err := fc.GetClaimDataLength(context.Background())
require.Equal(t, test.expectedClaimDataLen, claimDataLen)
require.Equal(t, test.expectedErr, err)
})
}
}
package cannon
import (
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"github.com/ethereum-optimism/optimism/op-challenger/fault"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
)
const proofsDir = "proofs"
type proofData struct {
ClaimValue hexutil.Bytes `json:"post"`
StateData hexutil.Bytes `json:"state-data"`
ProofData hexutil.Bytes `json:"proof-data"`
}
type CannonTraceProvider struct {
dir string
}
func NewCannonTraceProvider(dataDir string) *CannonTraceProvider {
return &CannonTraceProvider{
dir: dataDir,
}
}
func (p *CannonTraceProvider) Get(i uint64) (common.Hash, error) {
proof, err := p.loadProof(i)
if err != nil {
return common.Hash{}, err
}
value := common.BytesToHash(proof.ClaimValue)
if value == (common.Hash{}) {
return common.Hash{}, errors.New("proof missing post hash")
}
return value, nil
}
func (p *CannonTraceProvider) GetPreimage(i uint64) ([]byte, error) {
proof, err := p.loadProof(i)
if err != nil {
return nil, err
}
value := ([]byte)(proof.StateData)
if len(value) == 0 {
return nil, errors.New("proof missing state data")
}
return value, nil
}
func (p *CannonTraceProvider) AbsolutePreState() []byte {
panic("absolute prestate not yet supported")
}
func (p *CannonTraceProvider) loadProof(i uint64) (*proofData, error) {
path := filepath.Join(p.dir, proofsDir, fmt.Sprintf("%d.json", i))
file, err := os.Open(path)
if err != nil {
return nil, fmt.Errorf("cannot open proof file (%v): %w", path, err)
}
defer file.Close()
var proof proofData
err = json.NewDecoder(file).Decode(&proof)
if err != nil {
return nil, fmt.Errorf("failed to read proof (%v): %w", path, err)
}
return &proof, nil
}
var _ fault.TraceProvider = (*CannonTraceProvider)(nil)
package cannon
import (
"embed"
_ "embed"
"os"
"path/filepath"
"testing"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/require"
)
//go:embed test_data
var testData embed.FS
func TestGet(t *testing.T) {
provider := setupWithTestData(t)
t.Run("ExistingProof", func(t *testing.T) {
value, err := provider.Get(0)
require.NoError(t, err)
require.Equal(t, common.HexToHash("0x45fd9aa59768331c726e719e76aa343e73123af888804604785ae19506e65e87"), value)
})
t.Run("ProofUnavailable", func(t *testing.T) {
_, err := provider.Get(7)
require.ErrorIs(t, err, os.ErrNotExist)
})
t.Run("MissingPostHash", func(t *testing.T) {
_, err := provider.Get(1)
require.ErrorContains(t, err, "missing post hash")
})
t.Run("IgnoreUnknownFields", func(t *testing.T) {
value, err := provider.Get(2)
require.NoError(t, err)
expected := common.HexToHash("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
require.Equal(t, expected, value)
})
}
func TestGetPreimage(t *testing.T) {
provider := setupWithTestData(t)
t.Run("ExistingProof", func(t *testing.T) {
value, err := provider.GetPreimage(0)
require.NoError(t, err)
expected := common.Hex2Bytes("b8f068de604c85ea0e2acd437cdb47add074a2d70b81d018390c504b71fe26f400000000000000000000000000000000000000000000000000000000000000000000000000")
require.Equal(t, expected, value)
})
t.Run("ProofUnavailable", func(t *testing.T) {
_, err := provider.GetPreimage(7)
require.ErrorIs(t, err, os.ErrNotExist)
})
t.Run("MissingStateData", func(t *testing.T) {
_, err := provider.GetPreimage(1)
require.ErrorContains(t, err, "missing state data")
})
t.Run("IgnoreUnknownFields", func(t *testing.T) {
value, err := provider.GetPreimage(2)
require.NoError(t, err)
expected := common.Hex2Bytes("cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc")
require.Equal(t, expected, value)
})
}
func setupWithTestData(t *testing.T) *CannonTraceProvider {
srcDir := filepath.Join("test_data", "proofs")
entries, err := testData.ReadDir(srcDir)
require.NoError(t, err)
dataDir := t.TempDir()
require.NoError(t, os.Mkdir(filepath.Join(dataDir, proofsDir), 0o777))
for _, entry := range entries {
path := filepath.Join(srcDir, entry.Name())
file, err := testData.ReadFile(path)
require.NoErrorf(t, err, "reading %v", path)
err = os.WriteFile(filepath.Join(dataDir, "proofs", entry.Name()), file, 0o644)
require.NoErrorf(t, err, "writing %v", path)
}
return NewCannonTraceProvider(dataDir)
}
{"step":0,"pre":"0x71f9eb93ff904e5c03c3425228ef75766db0c906ad239df9a7a7f0d9c6a89705","post":"0x45fd9aa59768331c726e719e76aa343e73123af888804604785ae19506e65e87","state-data":"0xb8f068de604c85ea0e2acd437cdb47add074a2d70b81d018390c504b71fe26f400000000000000000000000000000000000000000000000000000000000000000000000000","proof-data":"0x08028e3c0000000000000000000000003c01000a24210b7c00200008000000008fa40004240210960000000c0000003403e00008000000008fa100040000102571c0e460346a89963488f904199fc7b4dc3dce2ddadfe484510463ae5014a79df9d922ef2cb84325e4e13ad98828ed29937c1440d8ea9eb19cab7474243c2d0b1a83646e420529153298f3a914a2550658c930f5e519b1d8dd151cf828116697d27264e6fad331820ecf3855adcc68dc529acfc33ecfa45a3a33c9ac766edc1f437988f2abab9dce36d3bac27b0f7b58a06d125acd50a1bf14bb8c7f6c1618465a532f945043b5a9ebc800d7336673019654eb76f8c10cff4f794ee586dc9992c318cef3dfa57032e2dd2fc5cb2dcfebd05551301704dd37a7c169448ec02574f706e38c20963616dae4e03cc91f39a4c3f9608119212965b72948f0ee15feb48b758f050691197816dc3ca919bbb3b50624d195c82d644025647ac8ba07206e5eb830799dfa896506743e81856edf8a31fef737fb4f44501dc71f019bdb12ed9cf0b9fba40ef98e5091b70484ba4f6af7711ec8b0ba4f4f2c4b11455a9e071f465817724159ddeea1170f4dd912c3a5a10ec6b046aa3c4a9febddfeeaa47e3ef06e1758694515562c958dc1b018149c7e4fcd91b9033ee216fea2ea498acd065e61fd436f26c31654bfd27c13ab67707384ad7a84a4b085e890e998e8a9655da954db3d279d598343a4706a2272fca526caeddb017627ecaf0138f1446c82e16d0926c0c510773e2b439c2c71414deb9b739fa370c010380d9ed5927fd7f4bb84ac22747f1bd405830b65d9e04c5efddc2c4dc89ba294c7568b9952193172d75ed8ea3e0fe57c8ad6636da54921ab52a8a0f54920d124f43b9fd3577690140cb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0c65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2f4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd95a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e3774df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652cdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef6834d8ef8faaf96b7b45235297538a266eb882b8b5680f621aab3417d43cdc2eb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","step-input":"0xf8e0cb960000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000014200000000000000000000000000000000000000000000000000000000000000e2b8f068de604c85ea0e2acd437cdb47add074a2d70b81d018390c504b71fe26f4000000000000000000000000000000000000000000000000000000000000000000000000000a3900000a39040000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007fffd0000000000000000000000000000000000000000000000000000000000000000000000000000000070008028e3c0000000000000000000000003c01000a24210b7c00200008000000008fa40004240210960000000c0000003403e00008000000008fa100040000102571c0e460346a89963488f904199fc7b4dc3dce2ddadfe484510463ae5014a79df9d922ef2cb84325e4e13ad98828ed29937c1440d8ea9eb19cab7474243c2d0b1a83646e420529153298f3a914a2550658c930f5e519b1d8dd151cf828116697d27264e6fad331820ecf3855adcc68dc529acfc33ecfa45a3a33c9ac766edc1f437988f2abab9dce36d3bac27b0f7b58a06d125acd50a1bf14bb8c7f6c1618465a532f945043b5a9ebc800d7336673019654eb76f8c10cff4f794ee586dc9992c318cef3dfa57032e2dd2fc5cb2dcfebd05551301704dd37a7c169448ec02574f706e38c20963616dae4e03cc91f39a4c3f9608119212965b72948f0ee15feb48b758f050691197816dc3ca919bbb3b50624d195c82d644025647ac8ba07206e5eb830799dfa896506743e81856edf8a31fef737fb4f44501dc71f019bdb12ed9cf0b9fba40ef98e5091b70484ba4f6af7711ec8b0ba4f4f2c4b11455a9e071f465817724159ddeea1170f4dd912c3a5a10ec6b046aa3c4a9febddfeeaa47e3ef06e1758694515562c958dc1b018149c7e4fcd91b9033ee216fea2ea498acd065e61fd436f26c31654bfd27c13ab67707384ad7a84a4b085e890e998e8a9655da954db3d279d598343a4706a2272fca526caeddb017627ecaf0138f1446c82e16d0926c0c510773e2b439c2c71414deb9b739fa370c010380d9ed5927fd7f4bb84ac22747f1bd405830b65d9e04c5efddc2c4dc89ba294c7568b9952193172d75ed8ea3e0fe57c8ad6636da54921ab52a8a0f54920d124f43b9fd3577690140cb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0c65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2f4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd95a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e3774df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652cdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef6834d8ef8faaf96b7b45235297538a266eb882b8b5680f621aab3417d43cdc2eb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","oracle-input":"0x"}
{"foo":0,"bar":"0x71f9eb93ff904e5c03c3425228ef75766db0c906ad239df9a7a7f0d9c6a89705","post":"0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb","state-data":"0xcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc","proof-data":"0x08028e3c0000000000000000000000003c01000a24210b7c00200008000000008fa40004240210960000000c0000003403e00008000000008fa100040000102571c0e460346a89963488f904199fc7b4dc3dce2ddadfe484510463ae5014a79df9d922ef2cb84325e4e13ad98828ed29937c1440d8ea9eb19cab7474243c2d0b1a83646e420529153298f3a914a2550658c930f5e519b1d8dd151cf828116697d27264e6fad331820ecf3855adcc68dc529acfc33ecfa45a3a33c9ac766edc1f437988f2abab9dce36d3bac27b0f7b58a06d125acd50a1bf14bb8c7f6c1618465a532f945043b5a9ebc800d7336673019654eb76f8c10cff4f794ee586dc9992c318cef3dfa57032e2dd2fc5cb2dcfebd05551301704dd37a7c169448ec02574f706e38c20963616dae4e03cc91f39a4c3f9608119212965b72948f0ee15feb48b758f050691197816dc3ca919bbb3b50624d195c82d644025647ac8ba07206e5eb830799dfa896506743e81856edf8a31fef737fb4f44501dc71f019bdb12ed9cf0b9fba40ef98e5091b70484ba4f6af7711ec8b0ba4f4f2c4b11455a9e071f465817724159ddeea1170f4dd912c3a5a10ec6b046aa3c4a9febddfeeaa47e3ef06e1758694515562c958dc1b018149c7e4fcd91b9033ee216fea2ea498acd065e61fd436f26c31654bfd27c13ab67707384ad7a84a4b085e890e998e8a9655da954db3d279d598343a4706a2272fca526caeddb017627ecaf0138f1446c82e16d0926c0c510773e2b439c2c71414deb9b739fa370c010380d9ed5927fd7f4bb84ac22747f1bd405830b65d9e04c5efddc2c4dc89ba294c7568b9952193172d75ed8ea3e0fe57c8ad6636da54921ab52a8a0f54920d124f43b9fd3577690140cb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0c65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2f4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd95a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e3774df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652cdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef6834d8ef8faaf96b7b45235297538a266eb882b8b5680f621aab3417d43cdc2eb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","step-input":"0xf8e0cb960000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000014200000000000000000000000000000000000000000000000000000000000000e2b8f068de604c85ea0e2acd437cdb47add074a2d70b81d018390c504b71fe26f4000000000000000000000000000000000000000000000000000000000000000000000000000a3900000a39040000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007fffd0000000000000000000000000000000000000000000000000000000000000000000000000000000070008028e3c0000000000000000000000003c01000a24210b7c00200008000000008fa40004240210960000000c0000003403e00008000000008fa100040000102571c0e460346a89963488f904199fc7b4dc3dce2ddadfe484510463ae5014a79df9d922ef2cb84325e4e13ad98828ed29937c1440d8ea9eb19cab7474243c2d0b1a83646e420529153298f3a914a2550658c930f5e519b1d8dd151cf828116697d27264e6fad331820ecf3855adcc68dc529acfc33ecfa45a3a33c9ac766edc1f437988f2abab9dce36d3bac27b0f7b58a06d125acd50a1bf14bb8c7f6c1618465a532f945043b5a9ebc800d7336673019654eb76f8c10cff4f794ee586dc9992c318cef3dfa57032e2dd2fc5cb2dcfebd05551301704dd37a7c169448ec02574f706e38c20963616dae4e03cc91f39a4c3f9608119212965b72948f0ee15feb48b758f050691197816dc3ca919bbb3b50624d195c82d644025647ac8ba07206e5eb830799dfa896506743e81856edf8a31fef737fb4f44501dc71f019bdb12ed9cf0b9fba40ef98e5091b70484ba4f6af7711ec8b0ba4f4f2c4b11455a9e071f465817724159ddeea1170f4dd912c3a5a10ec6b046aa3c4a9febddfeeaa47e3ef06e1758694515562c958dc1b018149c7e4fcd91b9033ee216fea2ea498acd065e61fd436f26c31654bfd27c13ab67707384ad7a84a4b085e890e998e8a9655da954db3d279d598343a4706a2272fca526caeddb017627ecaf0138f1446c82e16d0926c0c510773e2b439c2c71414deb9b739fa370c010380d9ed5927fd7f4bb84ac22747f1bd405830b65d9e04c5efddc2c4dc89ba294c7568b9952193172d75ed8ea3e0fe57c8ad6636da54921ab52a8a0f54920d124f43b9fd3577690140cb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0c65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2f4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd95a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e3774df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652cdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef6834d8ef8faaf96b7b45235297538a266eb882b8b5680f621aab3417d43cdc2eb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","oracle-input":"0x"}
......@@ -55,6 +55,11 @@ func (r *faultResponder) buildFaultAttackData(parentContractIndex int, pivot [32
)
}
// buildResolveData creates the transaction data for the Resolve function.
func (r *faultResponder) buildResolveData() ([]byte, error) {
return r.fdgAbi.Pack("resolve")
}
// BuildTx builds the transaction for the [faultResponder].
func (r *faultResponder) BuildTx(ctx context.Context, response Claim) ([]byte, error) {
if response.DefendsParent() {
......@@ -72,6 +77,16 @@ func (r *faultResponder) BuildTx(ctx context.Context, response Claim) ([]byte, e
}
}
// Resolve executes a resolve transaction to resolve a fault dispute game.
func (r *faultResponder) Resolve(ctx context.Context) error {
txData, err := r.buildResolveData()
if err != nil {
return err
}
return r.sendTxAndWait(ctx, txData)
}
// Respond takes a [Claim] and executes the response action.
func (r *faultResponder) Respond(ctx context.Context, response Claim) error {
txData, err := r.BuildTx(ctx, response)
......
......@@ -57,6 +57,24 @@ func newTestFaultResponder(t *testing.T, sendFails bool) (*faultResponder, *mock
return responder, mockTxMgr
}
// TestResponder_Resolve_SendFails tests that the [Responder.Resolve] method
// bubbles up the error returned by the [txmgr.Send] method.
func TestResponder_Resolve_SendFails(t *testing.T) {
responder, mockTxMgr := newTestFaultResponder(t, true)
err := responder.Resolve(context.Background())
require.ErrorIs(t, err, mockSendError)
require.Equal(t, 0, mockTxMgr.sends)
}
// TestResponder_Resolve_Success tests that the [Responder.Resolve] method
// succeeds when the tx candidate is successfully sent through the txmgr.
func TestResponder_Resolve_Success(t *testing.T) {
responder, mockTxMgr := newTestFaultResponder(t, false)
err := responder.Resolve(context.Background())
require.NoError(t, err)
require.Equal(t, 1, mockTxMgr.sends)
}
// TestResponder_Respond_SendFails tests that the [Responder.Respond] method
// bubbles up the error returned by the [txmgr.Send] method.
func TestResponder_Respond_SendFails(t *testing.T) {
......
......@@ -71,10 +71,13 @@ type StepData struct {
// AttemptStep determines what step should occur for a given leaf claim.
// An error will be returned if the claim is not at the max depth.
func (s *Solver) AttemptStep(claim Claim) (StepData, error) {
func (s *Solver) AttemptStep(claim Claim, agreeWithClaimLevel bool) (StepData, error) {
if claim.Depth() != s.gameDepth {
return StepData{}, errors.New("cannot step on non-leaf claims")
}
if agreeWithClaimLevel {
return StepData{}, errors.New("cannot step on claims we agree with")
}
claimCorrect, err := s.agreeWithClaim(claim.ClaimData)
if err != nil {
return StepData{}, err
......
......@@ -111,18 +111,29 @@ func TestAttemptStep(t *testing.T) {
},
}
step, err := solver.AttemptStep(bottom)
step, err := solver.AttemptStep(bottom, false)
require.NoError(t, err)
require.Equal(t, bottom, step.LeafClaim)
require.True(t, step.IsAttack)
require.Equal(t, step.PreState, BuildAlphabetPreimage(3, "d"))
_, err = solver.AttemptStep(middle)
_, err = solver.AttemptStep(middle, false)
require.Error(t, err)
step, err = solver.AttemptStep(zero)
step, err = solver.AttemptStep(zero, false)
require.NoError(t, err)
require.Equal(t, zero, step.LeafClaim)
require.True(t, step.IsAttack)
require.Equal(t, canonicalProvider.AbsolutePreState(), step.PreState)
}
func TestAttemptStep_AgreeWithClaimLevel_Fails(t *testing.T) {
maxDepth := 3
canonicalProvider := NewAlphabetProvider("abcdefgh", uint64(maxDepth))
solver := NewSolver(maxDepth, canonicalProvider)
_, _, middle, _ := createTestClaims()
step, err := solver.AttemptStep(middle, true)
require.Error(t, err)
require.Equal(t, StepData{}, step)
}
......@@ -38,6 +38,11 @@ var (
Usage: "Temporary hardcoded flag if we agree or disagree with the proposed output.",
EnvVars: prefixEnvVars("AGREE_WITH_PROPOSED_OUTPUT"),
}
GameDepthFlag = &cli.IntFlag{
Name: "game-depth",
Usage: "Depth of the game tree.",
EnvVars: prefixEnvVars("GAME_DEPTH"),
}
// Optional Flags
)
......@@ -47,6 +52,7 @@ var requiredFlags = []cli.Flag{
DGFAddressFlag,
AlphabetFlag,
AgreeWithProposedOutputFlag,
GameDepthFlag,
}
// optionalFlags is a list of unchecked cli flags
......
......@@ -4,7 +4,6 @@ set -euo pipefail
DIR=$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)
cd "$DIR"
# build the challenger to keep it up to date
make
cd ..
......@@ -21,21 +20,17 @@ CHARLIE_KEY="74feb147d72bfae943e6b4e483410933d9e447d5dc47d52432dcc2c1454dabb7"
MALLORY_ADDRESS="0x4641c704a6c743f73ee1f36C7568Fbf4b80681e4"
MALLORY_KEY="28d7045146193f5f4eeb151c4843544b1b0d30a7ac1680c845a416fac65a7715"
echo "----------------------------------------------------------------"
echo " - Fetching balance of the sponsor"
echo " - Balance: $(cast balance 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266)"
echo "----------------------------------------------------------------"
echo "Funding Charlie"
cast send $CHARLIE_ADDRESS --value 5ether --private-key $DEVNET_SPONSOR
echo "Funding Mallory"
cast send $MALLORY_ADDRESS --value 5ether --private-key $DEVNET_SPONSOR
# Fault game type = 0
GAME_TYPE=0
# Root claim commits to the entire trace.
......@@ -45,8 +40,6 @@ ROOT_CLAIM=$(cast keccak $(cast abi-encode "f(uint256,uint256)" 15 122))
# Doesn't matter right now since we're not deleting outputs, so just set it to 1
EXTRA_DATA=$(cast to-bytes32 1)
echo "Initializing the game"
cast call --private-key $MALLORY_KEY $DISPUTE_GAME_PROXY "create(uint8,bytes32,bytes)" $GAME_TYPE $ROOT_CLAIM $EXTRA_DATA
cast send --private-key $MALLORY_KEY $DISPUTE_GAME_PROXY "create(uint8,bytes32,bytes)" $GAME_TYPE $ROOT_CLAIM $EXTRA_DATA
#!/bin/bash
set -euo pipefail
DIR=$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)
cd "$DIR"
......@@ -15,4 +14,11 @@ MALLORY_KEY="28d7045146193f5f4eeb151c4843544b1b0d30a7ac1680c845a416fac65a7715"
FAULT_GAME_ADDRESS="0x8daf17a20c9dba35f005b6324f493785d239719d"
./bin/op-challenger --l1-eth-rpc http://localhost:8545 --alphabet "abcdexyz" --game-address $FAULT_GAME_ADDRESS --private-key $MALLORY_KEY --num-confirmations 1 --agree-with-proposed-output=false
./bin/op-challenger \
--l1-eth-rpc http://localhost:8545 \
--alphabet "abcdexyz" \
--game-address $FAULT_GAME_ADDRESS \
--private-key $MALLORY_KEY \
--num-confirmations 1 \
--game-depth 4 \
--agree-with-proposed-output=false
package op_e2e
import (
"time"
"github.com/ethereum-optimism/optimism/op-service/clock"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/beacon/engine"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/eth"
"github.com/ethereum/go-ethereum/eth/catalyst"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/log"
)
// fakePoS is a testing-only utility to attach to Geth,
// to build a fake proof-of-stake L1 chain with fixed block time and basic lagging safe/finalized blocks.
type fakePoS struct {
clock clock.Clock
eth *eth.Ethereum
log log.Logger
blockTime uint64
finalizedDistance uint64
safeDistance uint64
engineAPI *catalyst.ConsensusAPI
sub ethereum.Subscription
}
func (f *fakePoS) Start() error {
if advancing, ok := f.clock.(*clock.AdvancingClock); ok {
advancing.Start()
}
f.sub = event.NewSubscription(func(quit <-chan struct{}) error {
// poll every half a second: enough to catch up with any block time when ticks are missed
t := f.clock.NewTicker(time.Second / 2)
for {
select {
case now := <-t.Ch():
chain := f.eth.BlockChain()
head := chain.CurrentBlock()
finalized := chain.CurrentFinalBlock()
if finalized == nil { // fallback to genesis if nothing is finalized
finalized = chain.Genesis().Header()
}
safe := chain.CurrentSafeBlock()
if safe == nil { // fallback to finalized if nothing is safe
safe = finalized
}
if head.Number.Uint64() > f.finalizedDistance { // progress finalized block, if we can
finalized = f.eth.BlockChain().GetHeaderByNumber(head.Number.Uint64() - f.finalizedDistance)
}
if head.Number.Uint64() > f.safeDistance { // progress safe block, if we can
safe = f.eth.BlockChain().GetHeaderByNumber(head.Number.Uint64() - f.safeDistance)
}
// start building the block as soon as we are past the current head time
if head.Time >= uint64(now.Unix()) {
continue
}
newBlockTime := head.Time + f.blockTime
if time.Unix(int64(newBlockTime), 0).Add(5 * time.Minute).Before(f.clock.Now()) {
// We're a long way behind, let's skip some blocks...
newBlockTime = uint64(f.clock.Now().Unix())
}
res, err := f.engineAPI.ForkchoiceUpdatedV1(engine.ForkchoiceStateV1{
HeadBlockHash: head.Hash(),
SafeBlockHash: safe.Hash(),
FinalizedBlockHash: finalized.Hash(),
}, &engine.PayloadAttributes{
Timestamp: newBlockTime,
Random: common.Hash{},
SuggestedFeeRecipient: head.Coinbase,
})
if err != nil {
f.log.Error("failed to start building L1 block", "err", err)
continue
}
if res.PayloadID == nil {
f.log.Error("failed to start block building", "res", res)
continue
}
// wait with sealing, if we are not behind already
delay := time.Unix(int64(newBlockTime), 0).Sub(f.clock.Now())
tim := f.clock.NewTimer(delay)
select {
case <-tim.Ch():
// no-op
case <-quit:
tim.Stop()
return nil
}
payload, err := f.engineAPI.GetPayloadV1(*res.PayloadID)
if err != nil {
f.log.Error("failed to finish building L1 block", "err", err)
continue
}
if _, err := f.engineAPI.NewPayloadV1(*payload); err != nil {
f.log.Error("failed to insert built L1 block", "err", err)
continue
}
if _, err := f.engineAPI.ForkchoiceUpdatedV1(engine.ForkchoiceStateV1{
HeadBlockHash: payload.BlockHash,
SafeBlockHash: safe.Hash(),
FinalizedBlockHash: finalized.Hash(),
}, nil); err != nil {
f.log.Error("failed to make built L1 block canonical", "err", err)
continue
}
case <-quit:
return nil
}
}
})
return nil
}
func (f *fakePoS) Stop() error {
f.sub.Unsubscribe()
if advancing, ok := f.clock.(*clock.AdvancingClock); ok {
advancing.Stop()
}
return nil
}
package op_e2e
import (
"context"
"testing"
"time"
"github.com/ethereum-optimism/optimism/op-service/client/utils"
"github.com/stretchr/testify/require"
)
func TestTimeTravel(t *testing.T) {
InitParallel(t)
cfg := DefaultSystemConfig(t)
delete(cfg.Nodes, "verifier")
cfg.SupportL1TimeTravel = true
sys, err := cfg.Start()
require.Nil(t, err, "Error starting up system")
defer sys.Close()
l1Client := sys.Clients["l1"]
preTravel, err := l1Client.BlockByNumber(context.Background(), nil)
require.NoError(t, err)
sys.TimeTravelClock.AdvanceTime(24 * time.Hour)
// Check that the L1 chain reaches the new time reasonably quickly (ie without taking a week)
// It should be able to jump straight to the new time with just a single block
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
defer cancel()
err = utils.WaitFor(ctx, time.Second, func() (bool, error) {
postTravel, err := l1Client.BlockByNumber(context.Background(), nil)
if err != nil {
return false, err
}
diff := time.Duration(postTravel.Time()-preTravel.Time()) * time.Second
return diff.Hours() > 23, nil
})
require.NoError(t, err)
}
......@@ -9,9 +9,9 @@ import (
"time"
"github.com/ethereum-optimism/optimism/op-node/rollup/derive"
"github.com/ethereum-optimism/optimism/op-service/clock"
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/accounts/keystore"
"github.com/ethereum/go-ethereum/beacon/engine"
"github.com/ethereum/go-ethereum/cmd/utils"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core"
......@@ -21,7 +21,6 @@ import (
"github.com/ethereum/go-ethereum/eth/ethconfig"
"github.com/ethereum/go-ethereum/eth/tracers"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/event"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/miner"
"github.com/ethereum/go-ethereum/node"
......@@ -110,7 +109,7 @@ func waitForBlock(number *big.Int, client *ethclient.Client, timeout time.Durati
}
}
func initL1Geth(cfg *SystemConfig, genesis *core.Genesis, opts ...GethOption) (*node.Node, *eth.Ethereum, error) {
func initL1Geth(cfg *SystemConfig, genesis *core.Genesis, c clock.Clock, opts ...GethOption) (*node.Node, *eth.Ethereum, error) {
ethConfig := &ethconfig.Config{
NetworkId: cfg.DeployConfig.L1ChainID,
Genesis: genesis,
......@@ -134,6 +133,7 @@ func initL1Geth(cfg *SystemConfig, genesis *core.Genesis, opts ...GethOption) (*
// Instead of running a whole beacon node, we run this fake-proof-of-stake sidecar that sequences L1 blocks using the Engine API.
l1Node.RegisterLifecycle(&fakePoS{
clock: c,
eth: l1Eth,
log: log.Root(), // geth logger is global anyway. Would be nice to replace with a local logger though.
blockTime: cfg.DeployConfig.L1BlockTime,
......@@ -146,104 +146,6 @@ func initL1Geth(cfg *SystemConfig, genesis *core.Genesis, opts ...GethOption) (*
return l1Node, l1Eth, nil
}
// fakePoS is a testing-only utility to attach to Geth,
// to build a fake proof-of-stake L1 chain with fixed block time and basic lagging safe/finalized blocks.
type fakePoS struct {
eth *eth.Ethereum
log log.Logger
blockTime uint64
finalizedDistance uint64
safeDistance uint64
engineAPI *catalyst.ConsensusAPI
sub ethereum.Subscription
}
func (f *fakePoS) Start() error {
f.sub = event.NewSubscription(func(quit <-chan struct{}) error {
// poll every half a second: enough to catch up with any block time when ticks are missed
t := time.NewTicker(time.Second / 2)
for {
select {
case now := <-t.C:
chain := f.eth.BlockChain()
head := chain.CurrentBlock()
finalized := chain.CurrentFinalBlock()
if finalized == nil { // fallback to genesis if nothing is finalized
finalized = chain.Genesis().Header()
}
safe := chain.CurrentSafeBlock()
if safe == nil { // fallback to finalized if nothing is safe
safe = finalized
}
if head.Number.Uint64() > f.finalizedDistance { // progress finalized block, if we can
finalized = f.eth.BlockChain().GetHeaderByNumber(head.Number.Uint64() - f.finalizedDistance)
}
if head.Number.Uint64() > f.safeDistance { // progress safe block, if we can
safe = f.eth.BlockChain().GetHeaderByNumber(head.Number.Uint64() - f.safeDistance)
}
// start building the block as soon as we are past the current head time
if head.Time >= uint64(now.Unix()) {
continue
}
res, err := f.engineAPI.ForkchoiceUpdatedV1(engine.ForkchoiceStateV1{
HeadBlockHash: head.Hash(),
SafeBlockHash: safe.Hash(),
FinalizedBlockHash: finalized.Hash(),
}, &engine.PayloadAttributes{
Timestamp: head.Time + f.blockTime,
Random: common.Hash{},
SuggestedFeeRecipient: head.Coinbase,
})
if err != nil {
f.log.Error("failed to start building L1 block", "err", err)
continue
}
if res.PayloadID == nil {
f.log.Error("failed to start block building", "res", res)
continue
}
// wait with sealing, if we are not behind already
delay := time.Until(time.Unix(int64(head.Time+f.blockTime), 0))
tim := time.NewTimer(delay)
select {
case <-tim.C:
// no-op
case <-quit:
tim.Stop()
return nil
}
payload, err := f.engineAPI.GetPayloadV1(*res.PayloadID)
if err != nil {
f.log.Error("failed to finish building L1 block", "err", err)
continue
}
if _, err := f.engineAPI.NewPayloadV1(*payload); err != nil {
f.log.Error("failed to insert built L1 block", "err", err)
continue
}
if _, err := f.engineAPI.ForkchoiceUpdatedV1(engine.ForkchoiceStateV1{
HeadBlockHash: payload.BlockHash,
SafeBlockHash: safe.Hash(),
FinalizedBlockHash: finalized.Hash(),
}, nil); err != nil {
f.log.Error("failed to make built L1 block canonical", "err", err)
continue
}
case <-quit:
return nil
}
}
})
return nil
}
func (f *fakePoS) Stop() error {
f.sub.Unsubscribe()
return nil
}
func defaultNodeConfig(name string, jwtPath string) *node.Config {
return &node.Config{
Name: name,
......
......@@ -241,6 +241,9 @@ type SystemConfig struct {
// Target L1 tx size for the batcher transactions
BatcherTargetL1TxSizeBytes uint64
// SupportL1TimeTravel determines if the L1 node supports quickly skipping forward in time
SupportL1TimeTravel bool
}
type System struct {
......@@ -258,6 +261,13 @@ type System struct {
L2OutputSubmitter *l2os.L2OutputSubmitter
BatchSubmitter *bss.BatchSubmitter
Mocknet mocknet.Mocknet
// TimeTravelClock is nil unless SystemConfig.SupportL1TimeTravel was set to true
// It provides access to the clock instance used by the L1 node. Calling TimeTravelClock.AdvanceTime
// allows tests to quickly time travel L1 into the future.
// Note that this time travel may occur in a single block, creating a very large difference in the Time
// on sequential blocks.
TimeTravelClock *clock.AdvancingClock
}
func (sys *System) NodeEndpoint(name string) string {
......@@ -339,6 +349,12 @@ func (cfg SystemConfig) Start(_opts ...SystemConfigOption) (*System, error) {
}
}()
c := clock.SystemClock
if cfg.SupportL1TimeTravel {
sys.TimeTravelClock = clock.NewAdvancingClock(100 * time.Millisecond)
c = sys.TimeTravelClock
}
l1Genesis, err := genesis.BuildL1DeveloperGenesis(cfg.DeployConfig)
if err != nil {
return nil, err
......@@ -412,7 +428,7 @@ func (cfg SystemConfig) Start(_opts ...SystemConfigOption) (*System, error) {
sys.RollupConfig = &defaultConfig
// Initialize nodes
l1Node, l1Backend, err := initL1Geth(&cfg, l1Genesis, cfg.GethOptions["l1"]...)
l1Node, l1Backend, err := initL1Geth(&cfg, l1Genesis, c, cfg.GethOptions["l1"]...)
if err != nil {
return nil, err
}
......
......@@ -8,7 +8,6 @@ import (
"math/big"
"time"
"github.com/ethereum-optimism/optimism/op-e2e/e2eutils"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/ethereum/go-ethereum/common"
......@@ -19,6 +18,7 @@ import (
"github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum-optimism/optimism/op-bindings/predeploys"
"github.com/ethereum-optimism/optimism/op-service/client/utils"
)
var MessagePassedTopic = crypto.Keccak256Hash([]byte("MessagePassed(uint256,address,address,uint256,uint256,bytes,bytes32)"))
......@@ -36,7 +36,7 @@ func WaitForOutputRootPublished(ctx context.Context, client *ethclient.Client, p
}
getL2BlockFromLatestOutput := func() (*big.Int, error) { return l2OO.LatestBlockNumber(opts) }
outputBlockNum, err := e2eutils.WaitAndGet[*big.Int](ctx, time.Second, getL2BlockFromLatestOutput, func(latest *big.Int) bool {
outputBlockNum, err := utils.WaitAndGet[*big.Int](ctx, time.Second, getL2BlockFromLatestOutput, func(latest *big.Int) bool {
return latest.Cmp(l2BlockNumber) >= 0
})
if err != nil {
......@@ -72,7 +72,7 @@ func WaitForFinalizationPeriod(ctx context.Context, client *ethclient.Client, po
// Assume clock is relatively correct
time.Sleep(time.Until(targetTime))
// Poll for L1 Block to have a time greater than the target time
return e2eutils.WaitFor(ctx, time.Second, func() (bool, error) {
return utils.WaitFor(ctx, time.Second, func() (bool, error) {
header, err := client.HeaderByNumber(ctx, nil)
if err != nil {
return false, fmt.Errorf("retrieve latest header: %w", err)
......
package e2eutils
package utils
import (
"context"
......
package clock
import (
"sync/atomic"
"time"
)
type AdvancingClock struct {
*DeterministicClock
systemTime Clock
ticker Ticker
advanceEvery time.Duration
quit chan interface{}
running atomic.Bool
lastTick time.Time
}
// NewAdvancingClock creates a clock that, when started, advances at the same rate as the system clock but
// can also be advanced arbitrary amounts using the AdvanceTime method.
// Unlike the system clock, time does not progress smoothly but only increments when AdvanceTime is called or
// approximately after advanceEvery duration has elapsed. When advancing based on the system clock, the total time
// the system clock has advanced is added to the current time, preventing time differences from building up over time.
func NewAdvancingClock(advanceEvery time.Duration) *AdvancingClock {
now := SystemClock.Now()
return &AdvancingClock{
DeterministicClock: NewDeterministicClock(now),
systemTime: SystemClock,
advanceEvery: advanceEvery,
quit: make(chan interface{}),
lastTick: now,
}
}
func (c *AdvancingClock) Start() {
if !c.running.CompareAndSwap(false, true) {
// Already running
return
}
c.ticker = c.systemTime.NewTicker(c.advanceEvery)
go func() {
for {
select {
case now := <-c.ticker.Ch():
c.onTick(now)
case <-c.quit:
return
}
}
}()
}
func (c *AdvancingClock) Stop() {
if !c.running.CompareAndSwap(true, false) {
// Already stopped
return
}
c.quit <- nil
}
func (c *AdvancingClock) onTick(now time.Time) {
if !now.After(c.lastTick) {
// Time hasn't progressed for some reason, so do nothing
return
}
// Advance time by however long it has been since the last update.
// Ensures we don't drift from system time by more and more over time
advanceBy := now.Sub(c.lastTick)
c.AdvanceTime(advanceBy)
c.lastTick = now
}
package clock
import (
"testing"
"time"
"github.com/stretchr/testify/require"
)
func TestAdvancingClock_AdvancesByTimeBetweenTicks(t *testing.T) {
clock, realTime := newTestAdvancingClock(1 * time.Second)
clock.Start()
defer clock.Stop()
eventTicker := clock.NewTicker(1 * time.Second)
start := clock.Now()
realTime.AdvanceTime(1 * time.Second)
require.Equal(t, start.Add(1*time.Second), <-eventTicker.Ch(), "should trigger events when advancing")
require.Equal(t, start.Add(1*time.Second), clock.Now(), "Should advance on single tick")
start = clock.Now()
realTime.AdvanceTime(15 * time.Second)
require.Equal(t, start.Add(15*time.Second), <-eventTicker.Ch(), "should trigger events when advancing")
require.Equal(t, start.Add(15*time.Second), clock.Now(), "Should advance by time between ticks")
}
func TestAdvancingClock_Stop(t *testing.T) {
clock, realTime := newTestAdvancingClock(1 * time.Second)
clock.Start()
defer clock.Stop()
eventTicker := clock.NewTicker(1 * time.Second)
// Stop the clock again
clock.Stop()
start := clock.Now()
realTime.AdvanceTime(15 * time.Second)
clock.Start()
// Trigger the next tick
realTime.AdvanceTime(1 * time.Second)
// Time advances by the whole time the clock was stopped
// Note: if events were triggered while the clock was stopped, this event would be for the wrong time
require.Equal(t, start.Add(16*time.Second), <-eventTicker.Ch(), "should trigger events again after restarting")
require.Equal(t, start.Add(16*time.Second), clock.Now(), "Should advance by time between ticks after restarting")
}
func newTestAdvancingClock(advanceEvery time.Duration) (*AdvancingClock, *DeterministicClock) {
systemTime := NewDeterministicClock(time.UnixMilli(1000))
clock := &AdvancingClock{
DeterministicClock: NewDeterministicClock(time.UnixMilli(5000)),
systemTime: systemTime,
advanceEvery: advanceEvery,
quit: make(chan interface{}),
lastTick: systemTime.Now(),
}
return clock, systemTime
}
......@@ -107,8 +107,12 @@ func (t *ticker) fire(now time.Time) bool {
if t.stopped {
return false
}
t.ch <- now
t.nextDue = now.Add(t.period)
// Publish without blocking and only update due time if we publish successfully
select {
case t.ch <- now:
t.nextDue = now.Add(t.period)
default:
}
return true
}
......
......@@ -2,6 +2,7 @@ package clock
import (
"context"
"sync"
"sync/atomic"
"testing"
"time"
......@@ -156,6 +157,38 @@ func TestNewTicker(t *testing.T) {
require.Len(t, ticker.Ch(), 0, "should not fire until due again")
})
t.Run("SkipsFiringWhenProcessingIsSlow", func(t *testing.T) {
clock := NewDeterministicClock(time.UnixMilli(1000))
ticker := clock.NewTicker(5 * time.Second)
// Fire once to fill the channel queue
clock.AdvanceTime(5 * time.Second)
firstEventTime := clock.Now()
var startProcessing sync.WaitGroup
startProcessing.Add(1)
processedTicks := make(chan time.Time)
go func() {
startProcessing.Wait()
// Read two events then exit
for i := 0; i < 2; i++ {
event := <-ticker.Ch()
processedTicks <- event
}
}()
// Advance time further before processing of events has started
// Can't publish any further events to the channel but shouldn't block
clock.AdvanceTime(30 * time.Second)
// Allow processing to start
startProcessing.Done()
require.Equal(t, firstEventTime, <-processedTicks, "Should process first event")
clock.AdvanceTime(5 * time.Second)
require.Equal(t, clock.Now(), <-processedTicks, "Should skip to latest time")
})
t.Run("StopFiring", func(t *testing.T) {
clock := NewDeterministicClock(time.UnixMilli(1000))
ticker := clock.NewTicker(5 * time.Second)
......
......@@ -10,6 +10,7 @@
},
"scripts": {
"clean": "npx nx run-many --target=clean",
"bindings": "nx bindings @eth-optimism/contracts-bedrock",
"build": "npx nx run-many --target=build",
"test": "npx nx run-many --target=test",
"lint": "npx nx run-many --target=lint",
......
......@@ -44,7 +44,7 @@
"l1GenesisBlockTimestamp": "0x64935846",
"l1StartingBlockTag": "earliest",
"l2GenesisRegolithTimeOffset": "0x0",
"faultGameAbsolutePrestate": 140,
"faultGameAbsolutePrestate": 96,
"faultGameMaxDepth": 4,
"faultGameMaxDuration": 604800
}
}
\ No newline at end of file
......@@ -9,8 +9,10 @@
"contracts/**/*.sol"
],
"scripts": {
"bindings": "cd ../../op-bindings && make",
"build": "npx nx build:contracts",
"bindings": "pnpm bindings:ts && pnpm bindings:go",
"bindings:ts": "pnpm generate:addresses && nx generate @eth-optimism/contracts-ts",
"bindings:go": "cd ../../op-bindings && make",
"build": "nx build:contracts",
"prebuild:contracts": "./scripts/verify-foundry-install.sh",
"build:contracts": "pnpm build:forge",
"build:forge": "forge build",
......
artifacts
cache
typechain
.deps
.envrc
.env
/dist/
coverage
artifacts
cache
typechain
.deps
.envrc
.env
/dist/
module.exports = {
...require('../../.prettierrc.js'),
}
# Code gen
Summary:
- This package is generated from [contracts-bedrock](../contracts-bedrock/)
- Its version is kept in sync with contracts-bedrock via the [changeset config](../../.changeset/config.json), e.g. if contracts-bedrock is `4.2.0` this package will have the same version.
## Code gen instructions
To run the code gen, run the `generate` script from [package.json](./package.json). Make sure node modules are installed.
```bash
pnpm i && pnpm generate
```
MIT License
Copyright (c) 2022 Optimism
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
## Contracts TS
[![codecov](https://codecov.io/gh/ethereum-optimism/optimism/branch/develop/graph/badge.svg?token=0VTG7PG7YR&flag=contracts-bedrock-tests)](https://codecov.io/gh/ethereum-optimism/optimism)
ABI and Address constants + generated code from [@eth-optimism/contracts-bedrock/](../contracts-bedrock/) for use in TypeScript.
Much of this package is generated. See [CODE_GEN.md](./CODE_GEN.md) for instructions on how to generate.
#### @eth-optimism/contracts-ts
The main entrypoint exports constants related to contracts-bedrock `as const`. Using `as const` allows them to be [used in TypeScript with stronger typing than importing JSON](https://github.com/microsoft/TypeScript/issues/32063).
- Exports contract ABIs.
- Exports contract addresses.
```typescript
import {
l2OutputOracleProxyABI,
l2OutputOracleAddresses,
} from '@eth-optimism/contracts-ts'
console.log(l2OutputOracleAddresses[1], l2OutputOracleProxyABI)
```
Addresses are also exported as an object for convenience.
```typescript
import { addresses } from '@eth-optimism/contracts-ts'
console.log(addresses.L2OutputOracle[1])
```
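For reference, here is a minimal sketch (not taken from the package docs) of wiring these constants into viem, which this package depends on. It assumes the chain-id `1` key matches the mainnet entry in `addresses.json` shown elsewhere in this diff:

```typescript
// Minimal sketch, not from the package docs. Assumes the exported address map has a
// chain-id 1 (Ethereum mainnet) entry, matching addresses.json in this repository.
import { createPublicClient, getContract, http } from 'viem'
import { mainnet } from 'viem/chains'
import {
  l2OutputOracleProxyABI,
  l2OutputOracleAddresses,
} from '@eth-optimism/contracts-ts'

const client = createPublicClient({ chain: mainnet, transport: http() })

// Because the ABI and addresses are exported `as const`, the contract instance is fully typed.
const l2OutputOracle = getContract({
  address: l2OutputOracleAddresses[1],
  abi: l2OutputOracleProxyABI,
  publicClient: client,
})

// l2OutputOracle.read.<viewFunction>() is now available for every view in the ABI.
```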
#### @eth-optimism/contracts-ts/react
- All [React hooks](https://wagmi.sh/cli/plugins/react) are exported from `@eth-optimism/contracts-ts/react`.
```typescript
import { useAddressManagerAddress } from '@eth-optimism/contracts-ts/react'
const component = () => {
const { data, error, isLoading } = useAddressManagerAddress()
if (isLoading) {
return <div>Loading</div>
}
if (error) {
return <div>Error</div>
}
return <div>{data}</div>
}
```
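The hooks require a wagmi config to be provided via `WagmiConfig`. Below is a minimal setup sketch, adapted from this package's own React test later in this diff (the public RPC URL is a placeholder; substitute your own endpoint):

```typescript
// Minimal wagmi setup sketch, adapted from the package's react test in this diff.
// The RPC URL below is a placeholder assumption; use your own endpoint in practice.
import * as React from 'react'
import { WagmiConfig, configureChains, createConfig } from 'wagmi'
import { jsonRpcProvider } from 'wagmi/providers/jsonRpc'
import { optimism } from 'viem/chains'

const { publicClient } = configureChains(
  [optimism],
  [jsonRpcProvider({ rpc: () => ({ http: 'https://mainnet.optimism.io' }) })]
)

const config = createConfig({
  publicClient: ({ chainId }) => publicClient({ chainId }),
})

// Wrap the app so generated hooks such as useAddressManagerAddress can resolve a client.
export const Providers = ({ children }: { children: React.ReactNode }) => (
  <WagmiConfig config={config}>{children}</WagmiConfig>
)
```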
#### @eth-optimism/contracts-ts/actions
- All [wagmi actions](https://wagmi.sh/react/actions) for use in vanilla JS or non-React code.
```typescript
import { readSystemConfig } from '@eth-optimism/contracts-ts/actions'
console.log(await readSystemConfig())
```
#### See Also
- [Contracts bedrock specs](../../specs/)
- [Wagmi](https://wagmi.sh)
{
"AddressManager": {
"1": "0xdE1FCfB0851916CA5101820A69b13a4E276bd81F",
"5": "0xa6f73589243a6A7a9023b1Fa0651b1d89c177111"
},
"AssetReceiver": {
"1": "0x15DdA60616Ffca20371ED1659dBB78E888f65556",
"10": "0x15DdA60616Ffca20371ED1659dBB78E888f65556"
},
"CheckBalanceHigh": {
"1": "0x7eC64a8a591bFf829ff6C8be76074D540ACb813F",
"5": "0x7eC64a8a591bFf829ff6C8be76074D540ACb813F",
"420": "0x5d7103853f12109A7d27F118e54BbC654ad847E9"
},
"CheckBalanceLow": {
"1": "0x381a4eFC2A2C914eA1889722bB4B44Fa6BD5b640",
"5": "0x381a4eFC2A2C914eA1889722bB4B44Fa6BD5b640",
"420": "0x7Ce13D154FAEE5C8B3E6b19d4Add16f21d884474"
},
"CheckGelatoLow": {
"1": "0x4f7CFc43f6D262a085F3b946cAC69E7a8E39BBAa",
"5": "0x4f7CFc43f6D262a085F3b946cAC69E7a8E39BBAa",
"420": "0xF9c8a4Cb4021f57F9f6d69799cA9BefF64524862"
},
"CheckTrue": {
"1": "0x5c741a38cb11424711231777D71689C458eE835D",
"5": "0x5c741a38cb11424711231777D71689C458eE835D",
"420": "0x47443D0C184e022F19BD1578F5bca6B8a9F58E32"
},
"Drippie": {
"1": "0x44b3A2a040057eBafC601A78647e805fd58B1f50"
},
"Drippie_goerli": {
"5": "0x44b3A2a040057eBafC601A78647e805fd58B1f50"
},
"Drippie_optimism-goerli": {
"420": "0x8D8d533C16D23847EB04EEB0925be8900Dd3af86"
},
"L1CrossDomainMessenger": {
"1": "0x25ace71c97B33Cc4729CF772ae268934F7ab5fA1",
"5": "0x5086d1eEF304eb5284A0f6720f79403b4e9bE294"
},
"L1ERC721Bridge": {
"1": "0x5a7749f83b81B301cAb5f48EB8516B986DAef23D",
"5": "0x8DD330DdE8D9898d43b4dc840Da27A07dF91b3c9"
},
"L1StandardBridge": {
"1": "0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1",
"5": "0x636Af16bf2f682dD3109e60102b8E1A089FedAa8"
},
"L2OutputOracle": {
"1": "0xdfe97868233d1aa22e815a266982f2cf17685a27",
"5": "0xE6Dfba0953616Bacab0c9A8ecb3a9BBa77FC15c0"
},
"OptimismMintableERC20Factory": {
"1": "0x4200000000000000000000000000000000000012",
"5": "0x4200000000000000000000000000000000000012",
"420": "0x4200000000000000000000000000000000000012"
},
"OptimismPortal": {
"1": "0xbEb5Fc579115071764c7423A4f12eDde41f106Ed",
"5": "0x5b47E1A08Ea6d985D6649300584e6722Ec4B1383"
},
"PortalSender": {
"1": "0x0A893d9576b9cFD9EF78595963dc973238E78210",
"5": "0xe7FACd39531ee3C313330E93B4d7a8B8A3c84Aa4"
},
"ProxyAdmin": {
"1": "0x4200000000000000000000000000000000000018",
"5": "0x4200000000000000000000000000000000000018"
},
"SystemConfig": {
"1": "0x229047fed2591dbec1eF1118d64F7aF3dB9EB290",
"5": "0xAe851f927Ee40dE99aaBb7461C00f9622ab91d60"
},
"SystemDictator": {
"1": "0xB4453CEb33d2e67FA244A24acf2E50CEF31F53cB"
},
"SystemDictator_goerli": {
"5": "0x1f0613A44c9a8ECE7B3A2e0CdBdF0F5B47A50971"
},
"TeleportrWithdrawer": {
"1": "0x78A25524D90E3D0596558fb43789bD800a5c3007"
},
"AttestationStation": {
"10": "0xEE36eaaD94d1Cc1d0eccaDb55C38bFfB6Be06C77",
"420": "0xEE36eaaD94d1Cc1d0eccaDb55C38bFfB6Be06C77"
},
"L2ERC721Bridge": {
"10": "0x4200000000000000000000000000000000000014",
"420": "0x4200000000000000000000000000000000000014"
},
"MintManager": {
"10": "0x5C4e7Ba1E219E47948e6e3F55019A647bA501005",
"420": "0x038a8825A3C3B0c08d52Cc76E5E361953Cf6Dc76"
},
"OptimismMintableERC721Factory": {
"10": "0x4200000000000000000000000000000000000017"
},
"OptimismMintableERC721Factory_optimism-goerli": {
"420": "0x4200000000000000000000000000000000000017"
},
"Optimist": {
"10": "0x2335022c740d17c2837f9C884Bfe4fFdbf0A95D5",
"420": "0x2335022c740d17c2837f9C884Bfe4fFdbf0A95D5"
},
"OptimistAllowlist": {
"10": "0x482b1945D58f2E9Db0CEbe13c7fcFc6876b41180",
"420": "0x482b1945D58f2E9Db0CEbe13c7fcFc6876b41180"
},
"OptimistInviter": {
"10": "0x073031A1E1b8F5458Ed41Ce56331F5fd7e1de929",
"420": "0x073031A1E1b8F5458Ed41Ce56331F5fd7e1de929"
},
"BaseFeeVault": {
"420": "0x4200000000000000000000000000000000000019"
},
"GasPriceOracle": {
"420": "0x420000000000000000000000000000000000000F"
},
"L1Block": {
"420": "0x4200000000000000000000000000000000000015"
},
"L1FeeVault": {
"420": "0x420000000000000000000000000000000000001a"
},
"L2CrossDomainMessenger": {
"420": "0x4200000000000000000000000000000000000007"
},
"L2StandardBridge": {
"420": "0x4200000000000000000000000000000000000010"
},
"L2ToL1MessagePasser": {
"420": "0x4200000000000000000000000000000000000016"
},
"SequencerFeeVault": {
"420": "0x4200000000000000000000000000000000000011"
}
}
VITE_RPC_URL_L2_GOERLI=
VITE_RPC_URL_L2_MAINNET=
VITE_RPC_URL_L1_GOERLI=
VITE_RPC_URL_L1_MAINNET=
{
"name": "@eth-optimism/contracts-ts",
"version": "0.15.0",
"description": "TypeScript interface for Contracts Bedrock",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/ethereum-optimism/optimism.git",
"directory": "packages/contracts-ts"
},
"homepage": "https://optimism.io",
"type": "module",
"main": "dist/constants.js",
"module": "dist/constants.mjs",
"types": "src/constants.ts",
"exports": {
".": {
"types": "./src/constants.ts",
"import": "./dist/constants.js",
"require": "./dist/constants.cjs"
},
"./actions": {
"types": "./src/actions.ts",
"import": "./dist/actions.js",
"require": "./dist/actions.cjs"
},
"./react": {
"types": "./src/react.ts",
"import": "./dist/react.js",
"require": "./dist/react.cjs"
}
},
"files": [
"dist/",
"src/"
],
"scripts": {
"build": "tsup",
"generate": "wagmi generate && pnpm build && pnpm lint:fix",
"generate:check": "pnpm generate && git diff --exit-code ./addresses.json && git diff --exit-code ./abis.json",
"lint": "prettier --check .",
"lint:fix": "prettier --write .",
"test": "vitest",
"test:coverage": "vitest run --coverage",
"typecheck": "tsc --noEmit"
},
"devDependencies": {
"@eth-optimism/contracts-bedrock": "workspace:*",
"@testing-library/jest-dom": "^5.17.0",
"@testing-library/react-hooks": "^8.0.1",
"@types/glob": "^8.1.0",
"@vitest/coverage-istanbul": "^0.33.0",
"@wagmi/cli": "^1.3.0",
"@wagmi/core": "^1.3.8",
"abitype": "^0.9.2",
"glob": "^10.3.3",
"isomorphic-fetch": "^3.0.0",
"jest-dom": "link:@types/@testing-library/jest-dom",
"jsdom": "^22.1.0",
"tsup": "^7.1.0",
"typescript": "^5.1.6",
"vite": "^4.4.4",
"vitest": "^0.33.0"
},
"peerDependencies": {
"@wagmi/core": ">1.0.0",
"wagmi": ">1.0.0"
},
"peerDependenciesMeta": {
"wagmi": {
"optional": true
},
"@wagmi/core": {
"optional": true
}
},
"dependencies": {
"@testing-library/react": "^14.0.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"viem": "^1.3.0"
}
}
import fetch from 'isomorphic-fetch'
// viem needs this
global.fetch = fetch
import { test, expect } from 'vitest'
import { addresses } from './constants'
import { readFileSync } from 'fs'
import { join } from 'path'
const jsonAddresses = JSON.parse(
readFileSync(join(__dirname, '../addresses.json'), 'utf8')
)
test('should have generated addresses', () => {
expect(addresses).toEqual(jsonAddresses)
})
import matchers from '@testing-library/jest-dom/matchers'
import { cleanup, waitFor } from '@testing-library/react'
import { renderHook } from '@testing-library/react-hooks'
import { afterEach, expect, test } from 'vitest'
import { useMintManagerOwner } from './react'
import { configureChains, createConfig, WagmiConfig } from 'wagmi'
import * as React from 'react'
import { optimism } from 'viem/chains'
import { jsonRpcProvider } from 'wagmi/providers/jsonRpc'
expect.extend(matchers)
afterEach(() => {
cleanup()
})
const { publicClient } = configureChains(
[optimism],
[
jsonRpcProvider({
rpc: () => ({
http:
import.meta.env.VITE_RPC_URL_L2_MAINNET ??
'https://mainnet.optimism.io',
}),
}),
]
)
const config = createConfig({
publicClient: ({ chainId }) => publicClient({ chainId }),
})
const blockNumber = BigInt(106806163)
test('react hooks should work', async () => {
const hook = renderHook(
() => useMintManagerOwner({ chainId: 10, blockNumber }),
{
wrapper: ({ children }) => (
<WagmiConfig config={config}>{children}</WagmiConfig>
),
}
)
await waitFor(() => {
hook.rerender()
if (hook.result.current.error) throw hook.result.current.error
expect(hook.result.current?.data).toBeDefined()
})
const normalizedResult = {
...hook.result.current,
internal: {
...hook.result.current.internal,
dataUpdatedAt: 'SNAPSHOT_TEST_REMOVED!!!',
},
}
expect(normalizedResult).toMatchInlineSnapshot(`
{
"data": "0x2A82Ae142b2e62Cb7D10b55E323ACB1Cab663a26",
"error": null,
"fetchStatus": "idle",
"internal": {
"dataUpdatedAt": "SNAPSHOT_TEST_REMOVED!!!",
"errorUpdatedAt": 0,
"failureCount": 0,
"isFetchedAfterMount": true,
"isLoadingError": false,
"isPaused": false,
"isPlaceholderData": false,
"isPreviousData": false,
"isRefetchError": false,
"isStale": true,
"remove": [Function],
},
"isError": false,
"isFetched": true,
"isFetchedAfterMount": true,
"isFetching": false,
"isIdle": false,
"isLoading": false,
"isRefetching": false,
"isSuccess": true,
"refetch": [Function],
"status": "success",
}
`)
})
/// <reference types="vite/client" />
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"baseUrl": "./src",
"strict": true,
"skipLibCheck": true,
"module": "ESNext",
"moduleResolution": "node",
"jsx": "react",
"target": "ESNext",
"noEmit": true
},
"include": ["./src"]
}
import { defineConfig } from 'tsup'
import packageJson from './package.json'
export default defineConfig({
name: packageJson.name,
entry: ['src/constants.ts', 'src/actions.ts', 'src/react.ts'],
outDir: 'dist',
format: ['esm', 'cjs'],
splitting: false,
sourcemap: true,
clean: false,
})
import { defineConfig } from 'vitest/config'
// @see https://vitest.dev/config/
export default defineConfig({
test: {
setupFiles: './setupVitest.ts',
environment: 'jsdom',
coverage: {
provider: 'istanbul',
},
},
})
// Public RPCs are heavily throttled/rate limited, so replace these with RPCs that have API keys. These are meant to be testnet RPCs.
// In the future these will be renamed to VITE_E2E_RPC_URL_GOERLI etc.
VITE_E2E_RPC_URL_L1=https://ethereum-goerli.publicnode.com
VITE_E2E_RPC_URL_L2=https://goerli.optimism.io
\ No newline at end of file
......@@ -2,3 +2,4 @@
- The new tests for the next version of sdk will use vitest
- The vitest tests are kept here, separated from the mocha tests for now
- The values needed go in a `.env` file; see `example.env`
......@@ -11,22 +11,40 @@ const crossChainMessenger = new CrossChainMessenger({
bedrock: true,
})
describe('prove message', () => {
describe('getMessageStatus', () => {
it(`should be able to correctly find a finalized withdrawal`, async () => {
/**
* Tx hash of legacy withdrawal that was claimed
* Tx hash of a withdrawal
*
* @see https://goerli-optimism.etherscan.io/tx/0xda9e9c8dfc7718bc1499e1e64d8df6cddbabc46e819475a6c755db286a41b9fa
* @see https://goerli-optimism.etherscan.io/tx/0x8fb235a61079f3fa87da66e78c9da075281bc4ba5f1af4b95197dd9480e03bb5
*/
const txWithdrawalHash =
'0xda9e9c8dfc7718bc1499e1e64d8df6cddbabc46e819475a6c755db286a41b9fa'
'0x8fb235a61079f3fa87da66e78c9da075281bc4ba5f1af4b95197dd9480e03bb5'
const txReceipt = await l2Provider.getTransactionReceipt(txWithdrawalHash)
expect(txReceipt).toBeDefined()
expect(await crossChainMessenger.getMessageStatus(txWithdrawalHash)).toBe(
MessageStatus.RELAYED
)
expect(
await crossChainMessenger.getMessageStatus(
txWithdrawalHash,
0,
9370789 - 1000,
9370789
)
).toBe(MessageStatus.RELAYED)
}, 20_000)
it(`should return READY_FOR_RELAY if not in block range`, async () => {
const txWithdrawalHash =
'0x8fb235a61079f3fa87da66e78c9da075281bc4ba5f1af4b95197dd9480e03bb5'
const txReceipt = await l2Provider.getTransactionReceipt(txWithdrawalHash)
expect(txReceipt).toBeDefined()
expect(
await crossChainMessenger.getMessageStatus(txWithdrawalHash, 0, 0, 0)
).toBe(MessageStatus.READY_FOR_RELAY)
}, 20_000)
})