Commit e422c544 authored by mergify[bot]'s avatar mergify[bot] Committed by GitHub

Merge branch 'develop' into dependabot/npm_and_yarn/envalid-8.0.0

parents d41133c8 15dfa313
......@@ -23,4 +23,4 @@ FROM alpine:3.18
COPY --from=builder /app/indexer/indexer /usr/local/bin
CMD ["indexer", "all", "--config", "/app/indexer/indexer.toml"]
CMD ["indexer", "index", "--config", "/app/indexer/indexer.toml"]
package main
import (
"sync"
"github.com/ethereum-optimism/optimism/indexer"
"github.com/ethereum-optimism/optimism/indexer/api"
"github.com/ethereum-optimism/optimism/indexer/config"
......@@ -66,35 +64,6 @@ func runApi(ctx *cli.Context) error {
return api.Start(ctx.Context)
}
// runAll launches the api and indexer services concurrently and blocks
// until both have exited. Errors from either service are logged rather
// than returned, since the two services have no inter-dependencies; we
// rely on the logs to report a non-zero exit for either process.
func runAll(ctx *cli.Context) error {
	logger := log.NewLogger(log.ReadCLIConfig(ctx))

	var wg sync.WaitGroup
	// launch runs one service in its own goroutine, logging (not
	// propagating) any non-nil error it returns.
	launch := func(name string, run func(*cli.Context) error) {
		wg.Add(1)
		go func() {
			defer wg.Done()
			if err := run(ctx); err != nil {
				logger.Error(name+" process non-zero exit", "err", err)
			}
		}()
	}

	launch("api", runApi)
	launch("indexer", runIndexer)

	// Ensure both processes complete before returning.
	wg.Wait()
	return nil
}
func newCli(GitCommit string, GitDate string) *cli.App {
flags := []cli.Flag{ConfigFlag}
flags = append(flags, log.CLIFlags("INDEXER")...)
......@@ -115,12 +84,6 @@ func newCli(GitCommit string, GitDate string) *cli.App {
Description: "Runs the indexing service",
Action: runIndexer,
},
{
Name: "all",
Flags: flags,
Description: "Runs both the api service and the indexing service",
Action: runAll,
},
{
Name: "version",
Description: "print version",
......
......@@ -157,18 +157,20 @@ func LoadConfig(log log.Logger, path string) (Config, error) {
return conf, err
}
if conf.Chain.Preset != 0 {
if conf.Chain.Preset == DEVNET_L2_CHAIN_ID {
preset, err := GetDevnetPreset()
if err != nil {
return conf, err
}
conf.Chain = preset.ChainConfig
} else if conf.Chain.Preset != 0 {
preset, ok := Presets[conf.Chain.Preset]
if !ok {
return conf, fmt.Errorf("unknown preset: %d", conf.Chain.Preset)
}
log.Info("detected preset", "preset", conf.Chain.Preset, "name", preset.Name)
log.Info("setting L1 information from preset")
conf.Chain.L1Contracts = preset.ChainConfig.L1Contracts
conf.Chain.L1StartingHeight = preset.ChainConfig.L1StartingHeight
conf.Chain.L1BedrockStartingHeight = preset.ChainConfig.L1BedrockStartingHeight
conf.Chain.L2BedrockStartingHeight = preset.ChainConfig.L1BedrockStartingHeight
conf.Chain = preset.ChainConfig
}
// Setup L2Contracts from predeploys
......
package config
import (
"encoding/json"
"errors"
"io/fs"
"os"
)
var (
// filePath is the contract-address manifest written by the local devnet
// deployment, relative to the indexer's working directory.
filePath = "../.devnet/addresses.json"
// DEVNET_L2_CHAIN_ID is the preset id used to select the devnet
// configuration (see LoadConfig's preset handling).
DEVNET_L2_CHAIN_ID = 901
)
// GetDevnetPreset builds a devnet Preset by reading the L1 contract
// addresses produced by the local devnet deployment.
//
// It returns an error when the addresses file does not exist, cannot be
// read, or cannot be parsed into an L1Contracts value.
func GetDevnetPreset() (*Preset, error) {
	// Surface fs.ErrNotExist explicitly so callers can distinguish
	// "devnet not deployed" from a malformed manifest.
	if _, err := os.Stat(filePath); errors.Is(err, fs.ErrNotExist) {
		return nil, err
	}

	content, err := os.ReadFile(filePath)
	if err != nil {
		return nil, err
	}

	var l1Contracts L1Contracts
	if err := json.Unmarshal(content, &l1Contracts); err != nil {
		return nil, err
	}
	// NOTE: the previous version re-checked the ReadFile error here a
	// second time; that check was dead code and has been removed.

	return &Preset{
		Name: "devnet",
		ChainConfig: ChainConfig{
			Preset:      DEVNET_L2_CHAIN_ID,
			L1Contracts: l1Contracts,
		},
	}, nil
}
......@@ -16,6 +16,7 @@ var Presets = map[int]Preset{
10: {
Name: "Optimism",
ChainConfig: ChainConfig{
Preset: 10,
L1Contracts: L1Contracts{
AddressManager: common.HexToAddress("0xdE1FCfB0851916CA5101820A69b13a4E276bd81F"),
SystemConfigProxy: common.HexToAddress("0x229047fed2591dbec1eF1118d64F7aF3dB9EB290"),
......@@ -37,6 +38,7 @@ var Presets = map[int]Preset{
420: {
Name: "Optimism Goerli",
ChainConfig: ChainConfig{
Preset: 420,
L1Contracts: L1Contracts{
AddressManager: common.HexToAddress("0xa6f73589243a6A7a9023b1Fa0651b1d89c177111"),
SystemConfigProxy: common.HexToAddress("0xAe851f927Ee40dE99aaBb7461C00f9622ab91d60"),
......@@ -58,6 +60,7 @@ var Presets = map[int]Preset{
8453: {
Name: "Base",
ChainConfig: ChainConfig{
Preset: 8453,
L1Contracts: L1Contracts{
AddressManager: common.HexToAddress("0x8EfB6B5c4767B09Dc9AA6Af4eAA89F749522BaE2"),
SystemConfigProxy: common.HexToAddress("0x73a79Fab69143498Ed3712e519A88a918e1f4072"),
......@@ -73,6 +76,7 @@ var Presets = map[int]Preset{
84531: {
Name: "Base Goerli",
ChainConfig: ChainConfig{
Preset: 84531,
L1Contracts: L1Contracts{
AddressManager: common.HexToAddress("0x4Cf6b56b14c6CFcB72A75611080514F94624c54e"),
SystemConfigProxy: common.HexToAddress("0xb15eea247eCE011C68a614e4a77AD648ff495bc1"),
......@@ -88,6 +92,7 @@ var Presets = map[int]Preset{
7777777: {
Name: "Zora",
ChainConfig: ChainConfig{
Preset: 7777777,
L1Contracts: L1Contracts{
AddressManager: common.HexToAddress("0xEF8115F2733fb2033a7c756402Fc1deaa56550Ef"),
SystemConfigProxy: common.HexToAddress("0xA3cAB0126d5F504B071b81a3e8A2BBBF17930d86"),
......@@ -103,6 +108,7 @@ var Presets = map[int]Preset{
999: {
Name: "Zora Goerli",
ChainConfig: ChainConfig{
Preset: 999,
L1Contracts: L1Contracts{
AddressManager: common.HexToAddress("0x54f4676203dEDA6C08E0D40557A119c602bFA246"),
SystemConfigProxy: common.HexToAddress("0xF66C9A5E4fE1A8a9bc44a4aF80505a4C3620Ee64"),
......
......@@ -30,6 +30,7 @@ type ClaimLoader interface {
type Agent struct {
metrics metrics.Metricer
fdgAddr common.Address
solver *solver.GameSolver
loader ClaimLoader
responder Responder
......@@ -39,9 +40,10 @@ type Agent struct {
log log.Logger
}
func NewAgent(m metrics.Metricer, loader ClaimLoader, maxDepth int, trace types.TraceProvider, responder Responder, updater types.OracleUpdater, agreeWithProposedOutput bool, log log.Logger) *Agent {
func NewAgent(m metrics.Metricer, addr common.Address, loader ClaimLoader, maxDepth int, trace types.TraceProvider, responder Responder, updater types.OracleUpdater, agreeWithProposedOutput bool, log log.Logger) *Agent {
return &Agent{
metrics: m,
fdgAddr: addr,
solver: solver.NewGameSolver(maxDepth, trace),
loader: loader,
responder: responder,
......@@ -196,6 +198,7 @@ func (a *Agent) newGameFromContracts(ctx context.Context) (types.Game, error) {
if len(claims) == 0 {
return nil, errors.New("no claims")
}
a.metrics.RecordGameClaimCount(a.fdgAddr.String(), len(claims))
game := types.NewGameState(a.agreeWithProposedOutput, claims[0], uint64(a.maxDepth))
if err := game.PutAll(claims[1:]); err != nil {
return nil, fmt.Errorf("failed to load claims into the local state: %w", err)
......
......@@ -10,10 +10,11 @@ import (
"github.com/ethereum-optimism/optimism/op-challenger/game/fault/types"
gameTypes "github.com/ethereum-optimism/optimism/op-challenger/game/types"
"github.com/ethereum-optimism/optimism/op-challenger/metrics"
"github.com/ethereum-optimism/optimism/op-node/testlog"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
"github.com/stretchr/testify/require"
"github.com/ethereum-optimism/optimism/op-node/testlog"
)
// TestShouldResolve tests the resolution logic.
......@@ -110,11 +111,12 @@ func TestLoadClaimsWhenGameNotResolvable(t *testing.T) {
func setupTestAgent(t *testing.T, agreeWithProposedOutput bool) (*Agent, *stubClaimLoader, *stubResponder) {
logger := testlog.Logger(t, log.LvlInfo)
claimLoader := &stubClaimLoader{}
addr := common.HexToAddress("0x1234")
depth := 4
trace := alphabet.NewTraceProvider("abcd", uint64(depth))
responder := &stubResponder{}
updater := &stubUpdater{}
agent := NewAgent(metrics.NoopMetrics, claimLoader, depth, trace, responder, updater, agreeWithProposedOutput, logger)
agent := NewAgent(metrics.NoopMetrics, addr, claimLoader, depth, trace, responder, updater, agreeWithProposedOutput, logger)
return agent, claimLoader, responder
}
......
......@@ -106,7 +106,7 @@ func NewGamePlayer(
}
return &GamePlayer{
act: NewAgent(m, loader, int(gameDepth), provider, responder, updater, cfg.AgreeWithProposedOutput, logger).Act,
act: NewAgent(m, addr, loader, int(gameDepth), provider, responder, updater, cfg.AgreeWithProposedOutput, logger).Act,
agreeWithProposedOutput: cfg.AgreeWithProposedOutput,
loader: loader,
logger: logger,
......@@ -114,6 +114,10 @@ func NewGamePlayer(
}, nil
}
// Status returns the last known status of the game without performing
// any further actions or on-chain reads.
func (g *GamePlayer) Status() gameTypes.GameStatus {
return g.status
}
func (g *GamePlayer) ProgressGame(ctx context.Context) gameTypes.GameStatus {
if g.status != gameTypes.GameStatusInProgress {
// Game is already complete so don't try to perform further actions.
......
......@@ -109,9 +109,14 @@ func (c *coordinator) createJob(game common.Address) (*job, error) {
return nil, fmt.Errorf("failed to create game player: %w", err)
}
state.player = player
state.status = player.Status()
}
state.inflight = true
return &job{addr: game, player: state.player}, nil
if state.status != types.GameStatusInProgress {
c.logger.Debug("Not rescheduling resolved game", "game", game, "status", state.status)
return nil, nil
}
return &job{addr: game, player: state.player, status: state.status}, nil
}
func (c *coordinator) enqueueJob(ctx context.Context, j job) error {
......
......@@ -150,7 +150,10 @@ func TestDeleteDataForResolvedGames(t *testing.T) {
gameAddrs := []common.Address{gameAddr1, gameAddr2, gameAddr3}
require.NoError(t, c.schedule(ctx, gameAddrs))
require.Len(t, workQueue, len(gameAddrs), "should schedule all games")
// The work queue should only contain jobs for games 1 and 2
// A resolved game should not be scheduled for an update.
// This makes the inflight game metric more robust.
require.Len(t, workQueue, 2, "should schedule all games")
// Game 1 progresses and is still in progress
// Game 2 progresses and is now resolved
......@@ -249,6 +252,10 @@ func (g *stubGame) ProgressGame(_ context.Context) types.GameStatus {
return g.status
}
// Status returns the stub's fixed game status.
func (g *stubGame) Status() types.GameStatus {
return g.status
}
type createdGames struct {
t *testing.T
createCompleted common.Address
......
......@@ -15,11 +15,16 @@ type SchedulerMetricer interface {
RecordGamesStatus(inProgress, defenderWon, challengerWon int)
RecordGameUpdateScheduled()
RecordGameUpdateCompleted()
IncActiveExecutors()
DecActiveExecutors()
IncIdleExecutors()
DecIdleExecutors()
}
type Scheduler struct {
logger log.Logger
coordinator *coordinator
m SchedulerMetricer
maxConcurrency uint
scheduleQueue chan []common.Address
jobQueue chan job
......@@ -40,6 +45,7 @@ func NewScheduler(logger log.Logger, m SchedulerMetricer, disk DiskManager, maxC
return &Scheduler{
logger: logger,
m: m,
coordinator: newCoordinator(logger, m, jobQueue, resultQueue, createPlayer, disk),
maxConcurrency: maxConcurrency,
scheduleQueue: scheduleQueue,
......@@ -48,13 +54,24 @@ func NewScheduler(logger log.Logger, m SchedulerMetricer, disk DiskManager, maxC
}
}
// ThreadActive records that a worker goroutine has picked up a job:
// one executor moves from the idle gauge to the active gauge.
func (s *Scheduler) ThreadActive() {
s.m.IncActiveExecutors()
s.m.DecIdleExecutors()
}
// ThreadIdle records that a worker goroutine has finished a job:
// one executor moves from the active gauge back to the idle gauge.
func (s *Scheduler) ThreadIdle() {
s.m.IncIdleExecutors()
s.m.DecActiveExecutors()
}
func (s *Scheduler) Start(ctx context.Context) {
ctx, cancel := context.WithCancel(ctx)
s.cancel = cancel
for i := uint(0); i < s.maxConcurrency; i++ {
s.m.IncIdleExecutors()
s.wg.Add(1)
go progressGames(ctx, s.jobQueue, s.resultQueue, &s.wg)
go progressGames(ctx, s.jobQueue, s.resultQueue, &s.wg, s.ThreadActive, s.ThreadIdle)
}
s.wg.Add(1)
......
......@@ -10,6 +10,7 @@ import (
// GamePlayer advances a fault dispute game and reports its status.
type GamePlayer interface {
// ProgressGame performs the next action(s) for the game and returns
// the resulting game status.
ProgressGame(ctx context.Context) types.GameStatus
// Status returns the current game status without progressing the game.
Status() types.GameStatus
}
type DiskManager interface {
......
......@@ -8,15 +8,17 @@ import (
// progressGames accepts jobs from in channel, calls ProgressGame on the job.player and returns the job
// with updated job.status via the out channel.
// The loop exits when the ctx is done. wg.Done() is called when the function returns.
func progressGames(ctx context.Context, in <-chan job, out chan<- job, wg *sync.WaitGroup) {
// progressGames is the worker loop: it receives jobs from in, advances each
// game via job.player.ProgressGame, stores the resulting status on the job,
// and forwards the job on out. threadActive/threadIdle are called around
// each job so the scheduler can track executor utilisation.
// The loop exits when ctx is done; wg.Done() is called when it returns.
func progressGames(ctx context.Context, in <-chan job, out chan<- job, wg *sync.WaitGroup, threadActive, threadIdle func()) {
defer wg.Done()
for {
select {
case <-ctx.Done():
return
case j := <-in:
threadActive()
j.status = j.player.ProgressGame(ctx)
// NOTE: the send may block until the result is consumed, so the
// executor is only marked idle once the job has been handed off.
out <- j
threadIdle()
}
}
}
......@@ -7,6 +7,7 @@ import (
"time"
"github.com/ethereum-optimism/optimism/op-challenger/game/types"
"github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait"
"github.com/stretchr/testify/require"
)
......@@ -15,18 +16,32 @@ func TestWorkerShouldProcessJobsUntilContextDone(t *testing.T) {
in := make(chan job, 2)
out := make(chan job, 2)
ms := &metricSink{}
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
var wg sync.WaitGroup
wg.Add(1)
go progressGames(ctx, in, out, &wg)
go progressGames(ctx, in, out, &wg, ms.ThreadActive, ms.ThreadIdle)
in <- job{
player: &stubPlayer{status: types.GameStatusInProgress},
}
waitErr := wait.For(context.Background(), 100*time.Millisecond, func() (bool, error) {
return ms.activeCalls >= 1, nil
})
require.NoError(t, waitErr)
require.Equal(t, ms.activeCalls, 1)
require.Equal(t, ms.idleCalls, 1)
in <- job{
player: &stubPlayer{status: types.GameStatusDefenderWon},
}
waitErr = wait.For(context.Background(), 100*time.Millisecond, func() (bool, error) {
return ms.activeCalls >= 2, nil
})
require.NoError(t, waitErr)
require.Equal(t, ms.activeCalls, 2)
require.Equal(t, ms.idleCalls, 2)
result1 := readWithTimeout(t, out)
result2 := readWithTimeout(t, out)
......@@ -39,6 +54,19 @@ func TestWorkerShouldProcessJobsUntilContextDone(t *testing.T) {
wg.Wait()
}
// metricSink counts how often the worker reported itself active/idle,
// standing in for the scheduler's metrics in tests.
type metricSink struct {
activeCalls int
idleCalls int
}
// ThreadActive counts one active notification.
func (m *metricSink) ThreadActive() {
m.activeCalls++
}
// ThreadIdle counts one idle notification.
func (m *metricSink) ThreadIdle() {
m.idleCalls++
}
type stubPlayer struct {
status types.GameStatus
}
......@@ -47,6 +75,10 @@ func (s *stubPlayer) ProgressGame(ctx context.Context) types.GameStatus {
return s.status
}
// Status returns the stub's fixed game status.
func (s *stubPlayer) Status() types.GameStatus {
return s.status
}
func readWithTimeout[T any](t *testing.T, ch <-chan T) T {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
......
......@@ -25,10 +25,17 @@ type Metricer interface {
RecordGameMove()
RecordCannonExecutionTime(t float64)
RecordGameClaimCount(addr string, count int)
RecordGamesStatus(inProgress, defenderWon, challengerWon int)
RecordGameUpdateScheduled()
RecordGameUpdateCompleted()
IncActiveExecutors()
DecActiveExecutors()
IncIdleExecutors()
DecIdleExecutors()
}
type Metrics struct {
......@@ -41,11 +48,15 @@ type Metrics struct {
info prometheus.GaugeVec
up prometheus.Gauge
executors prometheus.GaugeVec
moves prometheus.Counter
steps prometheus.Counter
cannonExecutionTime prometheus.Histogram
gameClaimCount prometheus.GaugeVec
trackedGames prometheus.GaugeVec
inflightGames prometheus.Gauge
}
......@@ -75,6 +86,13 @@ func NewMetrics() *Metrics {
Name: "up",
Help: "1 if the op-challenger has finished starting up",
}),
executors: *factory.NewGaugeVec(prometheus.GaugeOpts{
Namespace: Namespace,
Name: "executors",
Help: "Number of active and idle executors",
}, []string{
"status",
}),
moves: factory.NewCounter(prometheus.CounterOpts{
Namespace: Namespace,
Name: "moves",
......@@ -93,6 +111,13 @@ func NewMetrics() *Metrics {
[]float64{1.0, 10.0},
prometheus.ExponentialBuckets(30.0, 2.0, 14)...),
}),
gameClaimCount: *factory.NewGaugeVec(prometheus.GaugeOpts{
Namespace: Namespace,
Name: "game_claim_count",
Help: "Number of claims in the game",
}, []string{
"game_address",
}),
trackedGames: *factory.NewGaugeVec(prometheus.GaugeOpts{
Namespace: Namespace,
Name: "tracked_games",
......@@ -149,6 +174,26 @@ func (m *Metrics) RecordCannonExecutionTime(t float64) {
m.cannonExecutionTime.Observe(t)
}
// IncActiveExecutors increments the gauge of executors currently running a job.
func (m *Metrics) IncActiveExecutors() {
	m.executors.With(prometheus.Labels{"status": "active"}).Inc()
}

// DecActiveExecutors decrements the gauge of executors currently running a job.
func (m *Metrics) DecActiveExecutors() {
	m.executors.With(prometheus.Labels{"status": "active"}).Dec()
}

// IncIdleExecutors increments the gauge of executors waiting for work.
func (m *Metrics) IncIdleExecutors() {
	m.executors.With(prometheus.Labels{"status": "idle"}).Inc()
}

// DecIdleExecutors decrements the gauge of executors waiting for work.
func (m *Metrics) DecIdleExecutors() {
	m.executors.With(prometheus.Labels{"status": "idle"}).Dec()
}
// RecordGameClaimCount sets the claim-count gauge for the given game address.
func (m *Metrics) RecordGameClaimCount(addr string, count int) {
	m.gameClaimCount.WithLabelValues(addr).Set(float64(count))
}
func (m *Metrics) RecordGamesStatus(inProgress, defenderWon, challengerWon int) {
m.trackedGames.WithLabelValues("in_progress").Set(float64(inProgress))
m.trackedGames.WithLabelValues("defender_won").Set(float64(defenderWon))
......
......@@ -22,3 +22,10 @@ func (*NoopMetricsImpl) RecordGamesStatus(inProgress, defenderWon, challengerWon
// The methods below are deliberate no-ops so NoopMetricsImpl satisfies the
// Metricer interface for tests/tools that do not record metrics.
func (*NoopMetricsImpl) RecordGameUpdateScheduled() {}
func (*NoopMetricsImpl) RecordGameUpdateCompleted() {}
func (*NoopMetricsImpl) IncActiveExecutors() {}
func (*NoopMetricsImpl) DecActiveExecutors() {}
func (*NoopMetricsImpl) IncIdleExecutors() {}
func (*NoopMetricsImpl) DecIdleExecutors() {}
func (*NoopMetricsImpl) RecordGameClaimCount(addr string, count int) {}
......@@ -56,11 +56,7 @@ log = logging.getLogger(__name__)
def main():
patterns = sys.argv[1].split(',')
# temporarily only run indexer tests if indexer is changed because the tests are flaky
if len(patterns) != 1 or patterns[0] != "indexer":
patterns = patterns + REBUILD_ALL_PATTERNS
patterns = patterns + REBUILD_ALL_PATTERNS
fp = os.path.realpath(__file__)
monorepo_path = os.path.realpath(os.path.join(fp, '..', '..'))
......
......@@ -184,7 +184,7 @@ importers:
version: 2.17.2(ts-node@10.9.1)(typescript@5.2.2)
ts-node:
specifier: ^10.9.1
version: 10.9.1(@types/node@20.6.2)(typescript@5.2.2)
version: 10.9.1(@types/node@20.6.3)(typescript@5.2.2)
tsx:
specifier: ^3.12.7
version: 3.12.7
......@@ -3711,20 +3711,20 @@ packages:
/@types/bn.js@4.11.6:
resolution: {integrity: sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/bn.js@5.1.0:
resolution: {integrity: sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/body-parser@1.19.1:
resolution: {integrity: sha512-a6bTJ21vFOGIkwM0kzh9Yr89ziVxq4vYH2fQ6N8AeipEzai/cFK6aGMArIkUeIdRIgpwQa+2bXiLuUJCpSf2Cg==}
dependencies:
'@types/connect': 3.4.35
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/chai-as-promised@7.1.5:
......@@ -3750,7 +3750,7 @@ packages:
/@types/connect@3.4.35:
resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
/@types/dateformat@5.0.0:
resolution: {integrity: sha512-SZg4JdHIWHQGEokbYGZSDvo5wA4TLYPXaqhigs/wH+REDOejcJzgH+qyY+HtEUtWOZxEUkbhbdYPqQDiEgrXeA==}
......@@ -3764,7 +3764,7 @@ packages:
/@types/express-serve-static-core@4.17.35:
resolution: {integrity: sha512-wALWQwrgiB2AWTT91CB62b6Yt0sNHpznUXeZEcnPU3DRdlDIz74x8Qg1UUYKSVFi+va5vKOLYRBI1bRKiLLKIg==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
'@types/qs': 6.9.7
'@types/range-parser': 1.2.4
'@types/send': 0.17.1
......@@ -3810,7 +3810,7 @@ packages:
dependencies:
'@types/abstract-leveldown': 5.0.2
'@types/level-errors': 3.0.0
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/lru-cache@5.1.1:
......@@ -3842,7 +3842,7 @@ packages:
/@types/mkdirp@0.5.2:
resolution: {integrity: sha512-U5icWpv7YnZYGsN4/cmh3WD2onMY0aJIiTE6+51TwJCttdHvtCYmkBNOobHlXwrJRL0nkH9jH4kD+1FAdMN4Tg==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/mocha@10.0.1:
......@@ -3861,7 +3861,7 @@ packages:
/@types/node-fetch@2.6.4:
resolution: {integrity: sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
form-data: 3.0.1
dev: true
......@@ -3878,6 +3878,10 @@ packages:
/@types/node@20.6.2:
resolution: {integrity: sha512-Y+/1vGBHV/cYk6OI1Na/LHzwnlNCAfU3ZNGrc1LdRe/LAIbdDPTTv/HU3M7yXN448aTVDq3eKRm2cg7iKLb8gw==}
dev: true
/@types/node@20.6.3:
resolution: {integrity: sha512-HksnYH4Ljr4VQgEy2lTStbCKv/P590tmPe5HqOnv9Gprffgv5WXAY+Y5Gqniu0GGqeTCUdBnzC3QSrzPkBkAMA==}
/@types/normalize-package-data@2.4.1:
resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==}
......@@ -3889,7 +3893,7 @@ packages:
/@types/pbkdf2@3.1.0:
resolution: {integrity: sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/pino-multi-stream@5.1.3:
......@@ -3907,13 +3911,13 @@ packages:
/@types/pino-std-serializers@2.4.1:
resolution: {integrity: sha512-17XcksO47M24IVTVKPeAByWUd3Oez7EbIjXpSbzMPhXVzgjGtrOa49gKBwxH9hb8dKv58OelsWQ+A1G1l9S3wQ==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/pino@6.3.11:
resolution: {integrity: sha512-S7+fLONqSpHeW9d7TApUqO6VN47KYgOXhCNKwGBVLHObq8HhaAYlVqUNdfnvoXjCMiwE5xcPm/5R2ZUh8bgaXQ==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
'@types/pino-pretty': 4.7.1
'@types/pino-std-serializers': 2.4.1
sonic-boom: 2.8.0
......@@ -3959,7 +3963,7 @@ packages:
/@types/readable-stream@2.3.15:
resolution: {integrity: sha512-oM5JSKQCcICF1wvGgmecmHldZ48OZamtMxcGGVICOJA8o8cahXC1zEVAif8iwoc5j8etxFaRFnf095+CDsuoFQ==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
safe-buffer: 5.1.2
dev: true
......@@ -3970,7 +3974,7 @@ packages:
/@types/secp256k1@4.0.3:
resolution: {integrity: sha512-Da66lEIFeIz9ltsdMZcpQvmrmmoqrfju8pm1BH8WbYjZSwUgCwXLb9C+9XYogwBITnbsSaMdVPb2ekf7TV+03w==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/seedrandom@3.0.1:
......@@ -3989,14 +3993,14 @@ packages:
resolution: {integrity: sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q==}
dependencies:
'@types/mime': 1.3.2
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/serve-static@1.13.10:
resolution: {integrity: sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==}
dependencies:
'@types/mime': 1.3.2
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@types/sinon-chai@3.2.5:
......@@ -4033,18 +4037,18 @@ packages:
/@types/ws@7.4.7:
resolution: {integrity: sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
/@types/ws@8.5.3:
resolution: {integrity: sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: false
/@types/ws@8.5.5:
resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==}
dependencies:
'@types/node': 20.6.2
'@types/node': 20.6.3
dev: true
/@typescript-eslint/eslint-plugin@6.7.0(@typescript-eslint/parser@6.4.0)(eslint@8.49.0)(typescript@5.1.6):
......@@ -9201,7 +9205,7 @@ packages:
solc: 0.7.3(debug@4.3.4)
source-map-support: 0.5.21
stacktrace-parser: 0.1.10
ts-node: 10.9.1(@types/node@20.6.2)(typescript@5.2.2)
ts-node: 10.9.1(@types/node@20.6.3)(typescript@5.2.2)
tsort: 0.0.1
typescript: 5.2.2
undici: 5.24.0
......@@ -13976,7 +13980,7 @@ packages:
yn: 3.1.1
dev: true
/ts-node@10.9.1(@types/node@20.6.2)(typescript@5.2.2):
/ts-node@10.9.1(@types/node@20.6.3)(typescript@5.2.2):
resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}
hasBin: true
peerDependencies:
......@@ -13995,7 +13999,7 @@ packages:
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 20.6.2
'@types/node': 20.6.3
acorn: 8.10.0
acorn-walk: 8.2.0
arg: 4.1.3
......
......@@ -35,7 +35,8 @@ services:
environment:
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_ADMIN_PWD}
volumes:
- ./datasources.yml:/etc/grafana/provisioning/datasources/datasources.yaml
- ./grafana/provisioning:/etc/grafana/provisioning
- ./grafana/dashboards:/var/lib/grafana/dashboards
security_opt:
- "no-new-privileges:true"
......
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"links": [],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "red",
"value": null
},
{
"color": "yellow",
"value": 1
},
{
"color": "green",
"value": 4
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 6,
"x": 0,
"y": 0
},
"id": 1,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "10.1.2",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_tx_success",
"fullMetaSearch": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "A",
"useBackend": false
}
],
"title": "Number of Successful Transaction Since Last Failure",
"type": "gauge"
},
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "text",
"value": null
},
{
"color": "red",
"value": 1
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 5,
"w": 6,
"x": 6,
"y": 0
},
"id": 2,
"options": {
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "10.1.2",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "PBFA97CFB590B2093"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_tx_failure",
"fullMetaSearch": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "A",
"useBackend": false
}
],
"title": "Number of Failed Transactions Since Last Success",
"type": "gauge"
}
],
"refresh": "5s",
"schemaVersion": 38,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "UFM: Metamask",
"uid": "f66f7076-c724-4f81-8ff9-58d6d99f2716",
"version": 1,
"weekStart": ""
}
apiVersion: 1
providers:
- name: 'default'
orgId: 1
folder: ''
type: file
disableDeletion: false
options:
path: /var/lib/grafana/dashboards
# User Facing Monitoring - Metamask Tests
## Running Locally
### Building Docker Image
```bash
docker build -t ufm-test-service-metamask .
```
### Running the Docker Container on MacOS
The following steps were taken from [here](https://www.oddbird.net/2022/11/30/headed-playwright-in-docker/#macos)
Apple’s operating system doesn’t include a built-in XServer, but we can use [XQuartz](https://www.xquartz.org/) to provide one:
1. Install XQuartz: `brew install --cask xquartz`
2. Open XQuartz, go to `Preferences -> Security`, and check `Allow connections from network clients`
3. Restart your computer (restarting XQuartz might not be enough)
4. Start XQuartz by executing `xhost +localhost` in your terminal
5. Open Docker Desktop and edit settings to give access to `/tmp/.X11-unix` in `Preferences -> Resources -> File sharing`
Once XQuartz is running with the right permissions, you can populate the environment variable and socket Docker args:
```bash
docker run --rm -it \
-e DISPLAY=host.docker.internal:0 \
-v /tmp/.X11-unix:/tmp/.X11-unix \
ufm-test-service-metamask
```
......@@ -20,10 +20,14 @@
"devDependencies": {
"@metamask/test-dapp": "^7.1.0",
"@playwright/test": "1.37.1",
"@synthetixio/synpress": "3.7.2-beta.5",
"@synthetixio/synpress": "3.7.2-beta.7",
"dotenv": "^16.3.1",
"static-server": "^2.2.1",
"typescript": "^5.1.6",
"viem": "^1.10.8"
},
"dependencies": {
"prom-client": "^14.2.0",
"zod": "^3.22.2"
}
}
This diff is collapsed.
import 'dotenv/config'
import { z } from 'zod'
import metamask from '@synthetixio/synpress/commands/metamask.js'
import { expect, test, type Page } from '@playwright/test'
import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'
import { testWithSynpress } from './testWithSynpressUtil'
import {
getMetamaskTxCounterValue,
incrementMetamaskTxCounter,
setMetamaskTxCounter,
} from './prometheusUtils'
const env = z.object({
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: z.string(),
OP_GOERLI_RPC_URL: z.string().url(),
METAMASK_DAPP_URL: z.string().url()
}).parse(process.env)
const expectedSender =
process.env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY?.startsWith('0x')
env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY?.startsWith('0x')
? privateKeyToAccount(
process.env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY as `0x${string}`
env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY as `0x${string}`
).address.toLowerCase()
: mnemonicToAccount(
process.env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY as string
env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY as string
).address.toLowerCase()
const expectedRecipient = '0x8fcfbe8953433fd1f2e8375ee99057833e4e1e9e'
......@@ -35,7 +47,7 @@ testWithSynpress('Add OP Goerli network', async () => {
name: 'op-goerli',
rpcUrls: {
default: {
http: [process.env.OP_GOERLI_RPC_URL],
http: [env.OP_GOERLI_RPC_URL],
},
},
id: '420',
......@@ -49,13 +61,26 @@ testWithSynpress('Add OP Goerli network', async () => {
},
})
await expect(sharedPage.locator('#chainId')).toHaveText(expectedChainId)
try {
await expect(sharedPage.locator('#chainId')).toHaveText(expectedChainId)
} catch (error) {
await setMetamaskTxCounter(true, 0)
await incrementMetamaskTxCounter(false)
throw error
}
})
test(`Connect wallet with ${expectedSender}`, async () => {
await sharedPage.click('#connectButton')
await metamask.acceptAccess()
await expect(sharedPage.locator('#accounts')).toHaveText(expectedSender)
try {
await expect(sharedPage.locator('#accounts')).toHaveText(expectedSender)
} catch (error) {
await setMetamaskTxCounter(true, 0)
await incrementMetamaskTxCounter(false)
throw error
}
})
test('Send an EIP-1559 transaciton and verfiy success', async () => {
......@@ -76,17 +101,14 @@ test('Send an EIP-1559 transaciton and verfiy success', async () => {
})
})
await metamask.confirmTransaction()
await metamask.confirmTransactionAndWaitForMining()
const txHash = await txHashPromise
// Waiting for Infura (Metamask given provider) to index our transaction
await sharedPage.waitForTimeout(10_000)
// Metamask test dApp allows us access to the Metamask RPC provider via loading this URL.
// The RPC reponse will be populated onto the page that's loaded.
// More info here: https://github.com/MetaMask/test-dapp/tree/main#usage
await sharedPage.goto(
`${process.env.METAMASK_DAPP_URL}/request.html?method=eth_getTransactionReceipt&params=["${txHash}"]`
`${env.METAMASK_DAPP_URL}/request.html?method=eth_getTransactionReceipt&params=["${txHash}"]`
)
// Waiting for RPC response to be populated on the page
......@@ -98,5 +120,14 @@ test('Send an EIP-1559 transaciton and verfiy success', async () => {
''
)
)
expect(transaction.status).toBe('0x1')
try {
expect(transaction.status).toBe('0x1')
await setMetamaskTxCounter(false, 0)
await incrementMetamaskTxCounter(true)
} catch (error) {
await setMetamaskTxCounter(true, 0)
await incrementMetamaskTxCounter(false)
throw error
}
})
import 'dotenv/config'
import { z } from 'zod'
import { Counter, Pushgateway } from 'prom-client'
const env = z
.object({
PROMETHEUS_SERVER_URL: z.string().url(),
PROMETHEUS_PUSHGATEWAY_URL: z.string().url(),
})
.parse(process.env)
const txSuccessMetricName = 'metamask_tx_success'
// Fixed typo ('metamask_tx_failuree') so the metric name matches the
// Grafana dashboard query `metamask_tx_failure`.
const txFailureMetricName = 'metamask_tx_failure'

// Counter of consecutive successful Metamask transactions (reset on failure).
const txSuccessCounter = new Counter({
  name: txSuccessMetricName,
  help: 'A counter signifying the number of successful transactions sent with Metamask since last failure',
})
// Counter of consecutive failed Metamask transactions (reset on success).
const txFailureCounter = new Counter({
  name: txFailureMetricName,
  help: 'A counter signifying the number of failed transactions sent with Metamask since last successful transaction',
})
/**
 * Reads the current value of the success or failure transaction counter
 * from the Prometheus query API.
 *
 * @param isSuccess selects the success counter when true, failure otherwise.
 * @returns the counter value as a number, or undefined when Prometheus has
 *          no data for the metric yet.
 * @throws when the HTTP request fails or the response fails schema validation.
 */
export const getMetamaskTxCounterValue = async (isSuccess: boolean) => {
  const metricName = isSuccess ? txSuccessMetricName : txFailureMetricName
  const prometheusMetricQuery = `${env.PROMETHEUS_SERVER_URL}/api/v1/query?query=${metricName}`

  const response = await fetch(prometheusMetricQuery)
  if (!response.ok) {
    console.error(response.status)
    console.error(response.statusText)
    throw new Error(`Failed to fetch metric from: ${prometheusMetricQuery}`)
  }

  // The following is an example of the expected response from
  // prometheusMetricQuery for response.json().data.result[0]:
  // [
  //   {
  //     metric: {
  //       __name__: 'metamask_tx_success',
  //       exported_job: 'metamask_tx_count',
  //       instance: 'pushgateway:9091',
  //       job: 'pushgateway'
  //     },
  //     value: [ 1695250414.474, '0' ]
  //   }
  // ]
  const responseJson = z
    .object({
      data: z.object({
        result: z.array(
          z.object({
            value: z.tuple([
              z.number(),
              z.number().or(z.string().transform((value) => parseInt(value))),
            ]),
          })
        ),
      }),
    })
    .parse(await response.json())

  // An empty result set means the metric has never been pushed. Checking the
  // array length directly replaces the previous approach of catching a
  // TypeError and matching its engine-specific message text.
  if (responseJson.data.result.length === 0) {
    console.warn(`No data found for metric ${metricName} in Prometheus`)
    return undefined
  }

  return responseJson.data.result[0].value[1]
}
// Sets the success or failure counter to an absolute value and pushes it to
// the Prometheus Pushgateway. prom-client Counters can only be incremented,
// so the counter is reset to zero first and then incremented to the target.
export const setMetamaskTxCounter = async (
isSuccess: boolean,
valueToSetTo: number
) => {
const metricName = isSuccess ? txSuccessMetricName : txFailureMetricName
const txCounter = isSuccess ? txSuccessCounter : txFailureCounter
txCounter.reset()
console.log(`Setting ${metricName} to ${valueToSetTo}`)
txCounter.inc(valueToSetTo)
// Push under a fixed job name so both counters land in the same group.
const pushGateway = new Pushgateway(env.PROMETHEUS_PUSHGATEWAY_URL)
await pushGateway.pushAdd({ jobName: 'metamask_tx_count' })
}
/**
 * Increments the selected transaction counter by one, reading its current
 * value from Prometheus first (treating "no data" as 0).
 *
 * @param isSuccess selects the success counter when true, failure otherwise.
 */
export const incrementMetamaskTxCounter = async (isSuccess: boolean) => {
  const metricName = isSuccess ? txSuccessMetricName : txFailureMetricName
  // Bug fix: read the counter that is being incremented. Previously this was
  // hard-coded to getMetamaskTxCounterValue(true), so incrementing the
  // failure counter read the success counter's value.
  const currentMetricValue = (await getMetamaskTxCounterValue(isSuccess)) ?? 0
  console.log(
    `Current value of ${metricName} is ${currentMetricValue}, incrementing to ${
      currentMetricValue + 1
    }`
  )
  await setMetamaskTxCounter(isSuccess, currentMetricValue + 1)
}
global:
scrape_interval: 5s
scrape_interval: 2s
scrape_configs:
- job_name: 'pushgateway'
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment