Commit 837a5de7 authored by Sabnock01

Merge branch 'develop' of github.com:Sabnock01/optimism into sabnock/issue-6741

parents be0860ae cb42a610
......@@ -7,8 +7,8 @@ toolchain go1.21.1
require github.com/ethereum-optimism/optimism v0.0.0
require (
golang.org/x/crypto v0.13.0 // indirect
golang.org/x/sys v0.12.0 // indirect
golang.org/x/crypto v0.14.0 // indirect
golang.org/x/sys v0.13.0 // indirect
)
replace github.com/ethereum-optimism/optimism v0.0.0 => ../../..
......@@ -4,9 +4,9 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
......@@ -39,10 +39,10 @@ require (
github.com/prometheus/client_golang v1.17.0
github.com/stretchr/testify v1.8.4
github.com/urfave/cli/v2 v2.25.7
golang.org/x/crypto v0.13.0
golang.org/x/crypto v0.14.0
golang.org/x/exp v0.0.0-20230905200255-921286631fa9
golang.org/x/sync v0.3.0
golang.org/x/term v0.12.0
golang.org/x/sync v0.4.0
golang.org/x/term v0.13.0
golang.org/x/time v0.3.0
gorm.io/driver/postgres v1.5.2
gorm.io/gorm v1.25.4
......@@ -197,7 +197,7 @@ require (
go.uber.org/zap v1.25.0 // indirect
golang.org/x/mod v0.12.0 // indirect
golang.org/x/net v0.15.0 // indirect
golang.org/x/sys v0.12.0 // indirect
golang.org/x/sys v0.13.0 // indirect
golang.org/x/text v0.13.0 // indirect
golang.org/x/tools v0.13.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect
......
......@@ -881,8 +881,8 @@ golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
......@@ -949,8 +949,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
......@@ -1006,13 +1006,13 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
......
......@@ -3,7 +3,12 @@ DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'uint256') THEN
CREATE DOMAIN UINT256 AS NUMERIC
CHECK (VALUE >= 0 AND VALUE < 2^256 and SCALE(VALUE) = 0);
CHECK (VALUE >= 0 AND VALUE < POWER(CAST(2 AS NUMERIC), CAST(256 AS NUMERIC)) AND SCALE(VALUE) = 0);
ELSE
-- To remain backwards compatible, drop the old constraint and re-add.
ALTER DOMAIN UINT256 DROP CONSTRAINT uint256_check;
ALTER DOMAIN UINT256 ADD
CHECK (VALUE >= 0 AND VALUE < POWER(CAST(2 AS NUMERIC), CAST(256 AS NUMERIC)) AND SCALE(VALUE) = 0);
END IF;
END $$;
......
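For reference, the bounds the `UINT256` domain enforces (non-negative, below 2^256, no fractional digits) can be mirrored in application code; the following is a minimal sketch with an assumed helper name, not code from this change:

```ts
// Illustrative only: the same bounds the UINT256 domain constraint enforces.
const UINT256_MAX = 2n ** 256n

const isUint256 = (value: bigint): boolean =>
  // bigint values have no fractional part, which covers SCALE(VALUE) = 0
  value >= 0n && value < UINT256_MAX

console.log(isUint256(2n ** 256n - 1n)) // true  (largest allowed value)
console.log(isUint256(2n ** 256n)) // false (out of range)
```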
......@@ -37,7 +37,7 @@ var Flags = []cli.Flag{
}
func init() {
Flags = append(Flags, flags.P2pFlags...)
Flags = append(Flags, flags.P2PFlags(envVarPrefix)...)
Flags = append(Flags, opmetrics.CLIFlags(envVarPrefix)...)
Flags = append(Flags, oplog.CLIFlags(envVarPrefix)...)
}
......@@ -309,7 +309,7 @@ var optionalFlags = []cli.Flag{
var Flags []cli.Flag
func init() {
optionalFlags = append(optionalFlags, P2pFlags...)
optionalFlags = append(optionalFlags, P2PFlags(EnvVarPrefix)...)
optionalFlags = append(optionalFlags, oplog.CLIFlags(EnvVarPrefix)...)
Flags = append(requiredFlags, optionalFlags...)
}
......
......@@ -29,7 +29,7 @@ import (
func NewConfig(ctx *cli.Context, rollupCfg *rollup.Config) (*p2p.Config, error) {
conf := &p2p.Config{}
if ctx.Bool(flags.DisableP2P.Name) {
if ctx.Bool(flags.DisableP2PName) {
conf.DisableP2P = true
return conf, nil
}
......@@ -64,7 +64,7 @@ func NewConfig(ctx *cli.Context, rollupCfg *rollup.Config) (*p2p.Config, error)
return nil, fmt.Errorf("failed to load banning option: %w", err)
}
conf.EnableReqRespSync = ctx.Bool(flags.SyncReqRespFlag.Name)
conf.EnableReqRespSync = ctx.Bool(flags.SyncReqRespName)
return conf, nil
}
......@@ -84,13 +84,13 @@ func validatePort(p uint) (uint16, error) {
// loadScoringParams loads the peer scoring options from the CLI context.
func loadScoringParams(conf *p2p.Config, ctx *cli.Context, rollupCfg *rollup.Config) error {
scoringLevel := ctx.String(flags.Scoring.Name)
scoringLevel := ctx.String(flags.ScoringName)
// Check old names for backwards compatibility
if scoringLevel == "" {
scoringLevel = ctx.String(flags.PeerScoring.Name)
scoringLevel = ctx.String(flags.PeerScoringName)
}
if scoringLevel == "" {
scoringLevel = ctx.String(flags.TopicScoring.Name)
scoringLevel = ctx.String(flags.TopicScoringName)
}
if scoringLevel != "" {
params, err := p2p.GetScoringParams(scoringLevel, rollupCfg)
......@@ -105,14 +105,14 @@ func loadScoringParams(conf *p2p.Config, ctx *cli.Context, rollupCfg *rollup.Con
// loadBanningOptions loads whether or not to ban peers from the CLI context.
func loadBanningOptions(conf *p2p.Config, ctx *cli.Context) error {
conf.BanningEnabled = ctx.Bool(flags.Banning.Name)
conf.BanningThreshold = ctx.Float64(flags.BanningThreshold.Name)
conf.BanningDuration = ctx.Duration(flags.BanningDuration.Name)
conf.BanningEnabled = ctx.Bool(flags.BanningName)
conf.BanningThreshold = ctx.Float64(flags.BanningThresholdName)
conf.BanningDuration = ctx.Duration(flags.BanningDurationName)
return nil
}
func loadListenOpts(conf *p2p.Config, ctx *cli.Context) error {
listenIP := ctx.String(flags.ListenIP.Name)
listenIP := ctx.String(flags.ListenIPName)
if listenIP != "" { // optional
conf.ListenIP = net.ParseIP(listenIP)
if conf.ListenIP == nil {
......@@ -120,11 +120,11 @@ func loadListenOpts(conf *p2p.Config, ctx *cli.Context) error {
}
}
var err error
conf.ListenTCPPort, err = validatePort(ctx.Uint(flags.ListenTCPPort.Name))
conf.ListenTCPPort, err = validatePort(ctx.Uint(flags.ListenTCPPortName))
if err != nil {
return fmt.Errorf("bad listen TCP port: %w", err)
}
conf.ListenUDPPort, err = validatePort(ctx.Uint(flags.ListenUDPPort.Name))
conf.ListenUDPPort, err = validatePort(ctx.Uint(flags.ListenUDPPortName))
if err != nil {
return fmt.Errorf("bad listen UDP port: %w", err)
}
......@@ -132,20 +132,20 @@ func loadListenOpts(conf *p2p.Config, ctx *cli.Context) error {
}
func loadDiscoveryOpts(conf *p2p.Config, ctx *cli.Context) error {
if ctx.Bool(flags.NoDiscovery.Name) {
if ctx.Bool(flags.NoDiscoveryName) {
conf.NoDiscovery = true
}
var err error
conf.AdvertiseTCPPort, err = validatePort(ctx.Uint(flags.AdvertiseTCPPort.Name))
conf.AdvertiseTCPPort, err = validatePort(ctx.Uint(flags.AdvertiseTCPPortName))
if err != nil {
return fmt.Errorf("bad advertised TCP port: %w", err)
}
conf.AdvertiseUDPPort, err = validatePort(ctx.Uint(flags.AdvertiseUDPPort.Name))
conf.AdvertiseUDPPort, err = validatePort(ctx.Uint(flags.AdvertiseUDPPortName))
if err != nil {
return fmt.Errorf("bad advertised UDP port: %w", err)
}
adIP := ctx.String(flags.AdvertiseIP.Name)
adIP := ctx.String(flags.AdvertiseIPName)
if adIP != "" { // optional
ips, err := net.LookupIP(adIP)
if err != nil {
......@@ -163,7 +163,7 @@ func loadDiscoveryOpts(conf *p2p.Config, ctx *cli.Context) error {
}
}
dbPath := ctx.String(flags.DiscoveryPath.Name)
dbPath := ctx.String(flags.DiscoveryPathName)
if dbPath == "" {
dbPath = "opnode_discovery_db"
}
......@@ -176,7 +176,7 @@ func loadDiscoveryOpts(conf *p2p.Config, ctx *cli.Context) error {
}
bootnodes := make([]*enode.Node, 0)
records := strings.Split(ctx.String(flags.Bootnodes.Name), ",")
records := strings.Split(ctx.String(flags.BootnodesName), ",")
for i, recordB64 := range records {
recordB64 = strings.TrimSpace(recordB64)
if recordB64 == "" { // ignore empty records
......@@ -194,8 +194,8 @@ func loadDiscoveryOpts(conf *p2p.Config, ctx *cli.Context) error {
conf.Bootnodes = p2p.DefaultBootnodes
}
if ctx.IsSet(flags.NetRestrict.Name) {
netRestrict, err := netutil.ParseNetlist(ctx.String(flags.NetRestrict.Name))
if ctx.IsSet(flags.NetRestrictName) {
netRestrict, err := netutil.ParseNetlist(ctx.String(flags.NetRestrictName))
if err != nil {
return fmt.Errorf("failed to parse net list: %w", err)
}
......@@ -206,7 +206,7 @@ func loadDiscoveryOpts(conf *p2p.Config, ctx *cli.Context) error {
}
func loadLibp2pOpts(conf *p2p.Config, ctx *cli.Context) error {
addrs := strings.Split(ctx.String(flags.StaticPeers.Name), ",")
addrs := strings.Split(ctx.String(flags.StaticPeersName), ",")
for i, addr := range addrs {
addr = strings.TrimSpace(addr)
if addr == "" {
......@@ -219,7 +219,7 @@ func loadLibp2pOpts(conf *p2p.Config, ctx *cli.Context) error {
conf.StaticPeers = append(conf.StaticPeers, a)
}
for _, v := range strings.Split(ctx.String(flags.HostMux.Name), ",") {
for _, v := range strings.Split(ctx.String(flags.HostMuxName), ",") {
v = strings.ToLower(strings.TrimSpace(v))
switch v {
case "yamux":
......@@ -231,7 +231,7 @@ func loadLibp2pOpts(conf *p2p.Config, ctx *cli.Context) error {
}
}
secArr := strings.Split(ctx.String(flags.HostSecurity.Name), ",")
secArr := strings.Split(ctx.String(flags.HostSecurityName), ",")
for _, v := range secArr {
v = strings.ToLower(strings.TrimSpace(v))
switch v {
......@@ -249,16 +249,16 @@ func loadLibp2pOpts(conf *p2p.Config, ctx *cli.Context) error {
}
}
conf.PeersLo = ctx.Uint(flags.PeersLo.Name)
conf.PeersHi = ctx.Uint(flags.PeersHi.Name)
conf.PeersGrace = ctx.Duration(flags.PeersGrace.Name)
conf.NAT = ctx.Bool(flags.NAT.Name)
conf.UserAgent = ctx.String(flags.UserAgent.Name)
conf.TimeoutNegotiation = ctx.Duration(flags.TimeoutNegotiation.Name)
conf.TimeoutAccept = ctx.Duration(flags.TimeoutAccept.Name)
conf.TimeoutDial = ctx.Duration(flags.TimeoutDial.Name)
conf.PeersLo = ctx.Uint(flags.PeersLoName)
conf.PeersHi = ctx.Uint(flags.PeersHiName)
conf.PeersGrace = ctx.Duration(flags.PeersGraceName)
conf.NAT = ctx.Bool(flags.NATName)
conf.UserAgent = ctx.String(flags.UserAgentName)
conf.TimeoutNegotiation = ctx.Duration(flags.TimeoutNegotiationName)
conf.TimeoutAccept = ctx.Duration(flags.TimeoutAcceptName)
conf.TimeoutDial = ctx.Duration(flags.TimeoutDialName)
peerstorePath := ctx.String(flags.PeerstorePath.Name)
peerstorePath := ctx.String(flags.PeerstorePathName)
if peerstorePath == "" {
return errors.New("peerstore path must be specified, use 'memory' to explicitly not persist peer records")
}
......@@ -279,11 +279,11 @@ func loadLibp2pOpts(conf *p2p.Config, ctx *cli.Context) error {
}
func loadNetworkPrivKey(ctx *cli.Context) (*crypto.Secp256k1PrivateKey, error) {
raw := ctx.String(flags.P2PPrivRaw.Name)
raw := ctx.String(flags.P2PPrivRawName)
if raw != "" {
return parsePriv(raw)
}
keyPath := ctx.String(flags.P2PPrivPath.Name)
keyPath := ctx.String(flags.P2PPrivPathName)
if keyPath == "" {
return nil, errors.New("no p2p private key path specified, cannot auto-generate key without path")
}
......@@ -333,10 +333,10 @@ func parsePriv(data string) (*crypto.Secp256k1PrivateKey, error) {
}
func loadGossipOptions(conf *p2p.Config, ctx *cli.Context) error {
conf.MeshD = ctx.Int(flags.GossipMeshDFlag.Name)
conf.MeshDLo = ctx.Int(flags.GossipMeshDloFlag.Name)
conf.MeshDHi = ctx.Int(flags.GossipMeshDhiFlag.Name)
conf.MeshDLazy = ctx.Int(flags.GossipMeshDlazyFlag.Name)
conf.FloodPublish = ctx.Bool(flags.GossipFloodPublishFlag.Name)
conf.MeshD = ctx.Int(flags.GossipMeshDName)
conf.MeshDLo = ctx.Int(flags.GossipMeshDloName)
conf.MeshDHi = ctx.Int(flags.GossipMeshDhiName)
conf.MeshDLazy = ctx.Int(flags.GossipMeshDlazyName)
conf.FloodPublish = ctx.Bool(flags.GossipFloodPublishName)
return nil
}
......@@ -15,7 +15,7 @@ import (
// LoadSignerSetup loads a configuration for a Signer to be set up later
func LoadSignerSetup(ctx *cli.Context) (p2p.SignerSetup, error) {
key := ctx.String(flags.SequencerP2PKeyFlag.Name)
key := ctx.String(flags.SequencerP2PKeyName)
if key != "" {
// Mnemonics are bad because they leak *all* keys when they leak.
// Unencrypted keys from file are bad because they are easy to leak (and we are not checking file permissions).
......
......@@ -46,7 +46,7 @@
"express-prom-bundle": "^6.6.0",
"lodash": "^4.17.21",
"morgan": "^1.10.0",
"pino": "^8.15.6",
"pino": "^8.16.0",
"pino-multi-stream": "^6.0.0",
"pino-sentry": "^0.14.0",
"prom-client": "^14.2.0"
......
......@@ -59,7 +59,7 @@ Optimism's smart contracts are written in Solidity and we use [foundry](https://
1. [golang](https://golang.org/doc/install)
1. [python](https://www.python.org/downloads/)
Our [Style Guide](STYLE_GUIDE.md) contains information about the project structure, syntax preferences, naming conventions, and more. Please take a look at it before submitting a PR, and let us know if you spot inconcistencies!
Our [Style Guide](STYLE_GUIDE.md) contains information about the project structure, syntax preferences, naming conventions, and more. Please take a look at it before submitting a PR, and let us know if you spot inconsistencies!
Once you've read the styleguide and are ready to work on your PR, there are a plethora of useful `pnpm` scripts to know about that will help you with development:
1. `pnpm build` Builds the smart contracts.
......
......@@ -42,7 +42,7 @@
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^6.7.0",
"@typescript-eslint/parser": "^6.4.0",
"@typescript-eslint/parser": "^6.7.5",
"tsx": "^3.12.7",
"typescript": "^5.2.2"
}
......
......@@ -54,7 +54,7 @@
"@vitest/coverage-istanbul": "^0.34.6",
"@wagmi/cli": "^1.5.2",
"@wagmi/core": "^1.4.3",
"abitype": "^0.9.9",
"abitype": "^0.9.10",
"glob": "^10.3.10",
"isomorphic-fetch": "^3.0.0",
"jest-dom": "link:@types/@testing-library/jest-dom",
......@@ -80,6 +80,6 @@
"@testing-library/react": "^14.0.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"viem": "^1.15.1"
"viem": "^1.16.0"
}
}
......@@ -49,7 +49,7 @@
"node-fetch": "^2.6.7"
},
"devDependencies": {
"@types/node": "^20.7.2",
"@types/node": "^20.8.4",
"mocha": "^10.2.0"
}
}
......@@ -38,13 +38,13 @@
"@testing-library/jest-dom": "^6.0.1",
"@testing-library/react-hooks": "^8.0.1",
"@vitest/coverage-istanbul": "^0.34.6",
"abitype": "^0.9.9",
"abitype": "^0.9.10",
"isomorphic-fetch": "^3.0.0",
"jest-dom": "link:@types/@testing-library/jest-dom",
"jsdom": "^22.1.0",
"tsup": "^7.2.0",
"typescript": "^5.2.2",
"viem": "^1.15.1",
"viem": "^1.16.0",
"vite": "^4.4.10",
"vitest": "^0.34.2"
},
......
......@@ -44,19 +44,19 @@
"@types/chai": "^4.3.6",
"@types/chai-as-promised": "^7.1.5",
"@types/mocha": "^10.0.2",
"@types/node": "^20.7.2",
"@types/node": "^20.8.4",
"chai-as-promised": "^7.1.1",
"ethereum-waffle": "^4.0.10",
"ethers": "^5.7.2",
"hardhat": "^2.18.0",
"hardhat-deploy": "^0.11.4",
"hardhat-deploy": "^0.11.42",
"isomorphic-fetch": "^3.0.0",
"mocha": "^10.2.0",
"nyc": "^15.1.0",
"ts-node": "^10.9.1",
"typedoc": "^0.25.1",
"typescript": "^5.2.2",
"viem": "^1.15.1",
"viem": "^1.16.0",
"vitest": "^0.34.2",
"zod": "^3.22.4"
},
......
......@@ -37,7 +37,7 @@
"@vitest/coverage-istanbul": "^0.34.6",
"tsup": "^7.2.0",
"typescript": "^5.2.2",
"viem": "^1.15.1",
"viem": "^1.16.0",
"vite": "^4.4.10",
"vitest": "^0.34.1",
"zod": "^3.22.4"
......
......@@ -197,7 +197,7 @@ As mentioned earlier in [memory](#memory), all memory operations are 4-byte alig
Since pre-image I/O occurs on memory, all pre-image I/O operations must strictly adhere to alignment boundaries.
This means the start and end of a read/write operation must fall within the same alignment boundary.
If an operation were to violate this, the input `count` of the read/write syscall must be
truncated such that the effective address of the last byte read/writtten matches the input effective address.
truncated such that the effective address of the last byte read/written matches the input effective address.
The VM must read/write the maximum amount of bytes possible without crossing the input address alignment boundary.
For example, the effect of a write request for a 3-byte aligned buffer must be exactly 3 bytes.
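As a hedged sketch of that truncation rule (helper name and word size assumed from the 4-byte alignment described above, not taken from the spec):

```ts
// Illustrative only: clamp a read/write count so the access stays within
// the 4-byte word that contains the input address.
const WORD_SIZE = 4

const effectiveCount = (addr: number, count: number): number => {
  const bytesLeftInWord = WORD_SIZE - (addr % WORD_SIZE)
  return Math.min(count, bytesLeftInWord)
}

// A request starting one byte past a word boundary has three bytes left in
// the word, so a larger count is truncated to exactly 3.
console.log(effectiveCount(0x1001, 8)) // -> 3
```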
......@@ -215,4 +215,4 @@ outside the general purpose registers).
VM implementations may raise an exception in other cases that are specific to the implementation.
For example, an on-chain FPVM that relies on pre-supplied merkle proofs for memory access may
raise an exception if the supplied merkle proof doees not match the pre-state `memRoot`.
raise an exception if the supplied merkle proof does not match the pre-state `memRoot`.
......@@ -21,13 +21,14 @@
- [Main content](#main-content)
- [Epilogue](#epilogue)
- [Pre-image hinting routes](#pre-image-hinting-routes)
- [`l1-header <blockhash>`](#l1-header-blockhash)
- [`l1-block-header <blockhash>`](#l1-block-header-blockhash)
- [`l1-transactions <blockhash>`](#l1-transactions-blockhash)
- [`l1-receipts <blockhash>`](#l1-receipts-blockhash)
- [`l2-header <blockhash>`](#l2-header-blockhash)
- [`l2-block-header <blockhash>`](#l2-block-header-blockhash)
- [`l2-transactions <blockhash>`](#l2-transactions-blockhash)
- [`l2-code <codehash>`](#l2-code-codehash)
- [`l2-state-node <nodehash>`](#l2-state-node-nodehash)
- [`l2-output <outputroot>`](#l2-output-outputroot)
- [Fault Proof VM](#fault-proof-vm)
- [Fault Proof Interactive Dispute Game](#fault-proof-interactive-dispute-game)
......@@ -341,7 +342,7 @@ This can be exposed via a CLI, or alternative inter-process API.
Every instance of `<blockhash>` in the below routes is `0x`-prefixed, lowercase, hex-encoded.
#### `l1-header <blockhash>`
#### `l1-block-header <blockhash>`
Requests the host to prepare the L1 block header RLP pre-image of the block `<blockhash>`.
......@@ -355,7 +356,7 @@ prepare the RLP pre-images of each of them, including transactions-list MPT node
Requests the host to prepare the list of receipts of the L1 block with `<blockhash>`:
prepare the RLP pre-images of each of them, including receipts-list MPT nodes.
#### `l2-header <blockhash>`
#### `l2-block-header <blockhash>`
Requests the host to prepare the L2 block header RLP pre-image of the block `<blockhash>`.
......@@ -372,6 +373,11 @@ Requests the host to prepare the L2 smart-contract code with the given `<codehas
Requests the host to prepare the L2 MPT node preimage with the given `<nodehash>`.
#### `l2-output <outputroot>`
Requests the host to prepare the L2 Output at the L2 output root `<outputroot>`.
The L2 Output is the preimage of a [computed output root](./proposals.md#l2-output-commitment-construction).
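As an illustrative sketch (not an API from this change), the textual hint for a given output root would follow the same shape as the other routes, with the root `0x`-prefixed, lowercase, and hex-encoded:

```ts
// Illustrative only: assembling the l2-output hint text for a placeholder root.
const outputRoot =
  '0x0000000000000000000000000000000000000000000000000000000000000001' // placeholder value
const hint = `l2-output ${outputRoot.toLowerCase()}`
console.log(hint) // "l2-output 0x...0001"
```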
## Fault Proof VM
[VM]: #Fault-Proof-VM
......
......@@ -5,14 +5,24 @@ CI=false
GRAFANA_ADMIN_PWD=op
# Used by Test Services to query metrics. http://prometheus will use Docker's built-in DNS
PROMETHEUS_SERVER_URL="http://prometheus:9090"
METRICS_READ_URL="http://prometheus:9090/api/v1/query"
# The needed credentials to access METRICS_READ_URL. Will be sent as: Authorization: Bearer username:password
METRICS_READ_USERNAME=""
METRICS_READ_PASSWORD=""
# Used by Test Services to push metrics. http://pushgateway will use Docker's built-in DNS
PROMETHEUS_PUSHGATEWAY_URL="http://pushgateway:9091"
# If true, Metamask Test Service will install Xvfb inside it's container and used that for Playwright tests.
# If false, Metamask you will need to specify METAMASK_DISPLAY and METAMASK_DISPLAY_VOLUME so Playwright can connect to a display
METAMASK_RUN_HEADLESS=true
METRICS_WRITE_URL="http://pushgateway:9091"
# Dictates how the request body is structured when pushing metrics. Should be either "grafana" or "prometheus-pushgateway"
METRICS_WRITE_TOOL="prometheus-pushgateway"
# The source the pushed metric will be labeled as originating from. Only required when METRICS_WRITE_TOOL is "grafana"
METRIC_WRITE_SOURCE=""
# The needed credentials to access METRICS_WRITE_URL. Will be sent as: Authorization: Bearer username:password
METRICS_WRITE_USERNAME=""
METRICS_WRITE_PASSWORD=""
# If true (or anything other than false), Xvfb will be installed inside the Metamask Test Service container and used for Playwright tests.
# If false, you will need to specify METAMASK_DISPLAY and METAMASK_DISPLAY_VOLUME so Playwright can connect to a display
METAMASK_PLAYWRIGHT_RUN_HEADLESS=true
# The display used for running Playwright tests
METAMASK_DISPLAY=host.docker.internal:0
......@@ -23,8 +33,8 @@ METAMASK_DISPLAY_VOLUME=/tmp/.X11-unix:/tmp/.X11-unix
# Mnemonic used to initialize Metamask, make sure there's enough ETH to run tests
METAMASK_SECRET_WORDS_OR_PRIVATEKEY="test test test test test test test test test test test junk"
# The initial network Metamask will be initialized with, Test Service will override with OP Goerli
METAMASK_NETWORK="goerli"
# The initial network Metamask will be initialized with, Test Service will override with OP Sepolia
METAMASK_NETWORK="sepolia"
# The password to unlock Metamask
METAMASK_PASSWORD="T3st_P@ssw0rd!"
......@@ -32,5 +42,5 @@ METAMASK_PASSWORD="T3st_P@ssw0rd!"
# The URL of the Metamask test dApp that will be spun up automatically for testing against
METAMASK_DAPP_URL="http://localhost:9011"
# The OP Goerli RPC provider to be used to read/write data
METAMASK_OP_GOERLI_RPC_URL=""
# The OP Sepolia RPC provider to be used to read/write data
METAMASK_OP_SEPOLIA_RPC_URL=""
# Used by Test Services to perform certain actions if in CI environment
CI=true
CI=false
# This is the password used to login into Grafana dashboard as the admin user
GRAFANA_ADMIN_PWD=op
# Used by Test Services to query metrics. 172.17.0.1 is the default Docker gateway address
PROMETHEUS_SERVER_URL="http://172.17.0.1:9090"
# Used by Test Services to push metrics. 172.17.0.1 is the default Docker gateway address
PROMETHEUS_PUSHGATEWAY_URL="http://172.17.0.1:9091"
# If true, Metamask Test Service will install Xvfb inside it's container and used that for Playwright tests.
# If false, Metamask you will need to specify METAMASK_DISPLAY and METAMASK_DISPLAY_VOLUME so Playwright can connect to a display
METAMASK_RUN_HEADLESS=true
# Used by Test Services to query metrics. http://prometheus will use Docker's built-in DNS
METRICS_READ_URL="http://prometheus:9090/api/v1/query"
# The needed credentials to access METRICS_READ_URL. Will be sent as: Authorization: Bearer username:password
METRICS_READ_USERNAME=""
METRICS_READ_PASSWORD=""
# Used by Test Services to push metrics. http://pushgateway will use Docker's built-in DNS
METRICS_WRITE_URL="http://pushgateway:9091"
# Dictates how the request body is structured when pushing metrics. Should be either "grafana" or "prometheus-pushgateway"
METRICS_WRITE_TOOL="prometheus-pushgateway"
# The source the pushed metric will be labeled as originating from. Only required when METRICS_WRITE_TOOL is "grafana"
METRIC_WRITE_SOURCE=""
# The needed credentials to access METRICS_WRITE_URL. Will be sent as: Authorization: Bearer username:password
METRICS_WRITE_USERNAME=""
METRICS_WRITE_PASSWORD=""
# If true (or anything other than false), Xvfb will be installed inside the Metamask Test Service container and used for Playwright tests.
# If false, you will need to specify METAMASK_DISPLAY and METAMASK_DISPLAY_VOLUME so Playwright can connect to a display
METAMASK_PLAYWRIGHT_RUN_HEADLESS=true
# The display used for running Playwright tests
METAMASK_DISPLAY=host.docker.internal:0
......@@ -23,17 +33,14 @@ METAMASK_DISPLAY_VOLUME=/tmp/.X11-unix:/tmp/.X11-unix
# Mnemonic used to initialize Metamask, make sure there's enough ETH to run tests
METAMASK_SECRET_WORDS_OR_PRIVATEKEY="test test test test test test test test test test test junk"
# The initial network Metamask will be initialized with, Test Service will override with OP Goerli.
# Will be defaulted to goerli in Github workflow file
METAMASK_NETWORK="goerli"
# The initial network Metamask will be initialized with, Test Service will override with OP Sepolia
METAMASK_NETWORK="sepolia"
# The password to unlock Metamask
# Will be defaulted to T3st_P@ssw0rd! in Github workflow file
METAMASK_PASSWORD="T3st_P@ssw0rd!"
# The URL of the Metamask test dApp that will be spun up automatically for testing against
# Will be defaulted to http://localhost:9011 in Github workflow file
METAMASK_DAPP_URL="http://localhost:9011"
# The OP Goerli RPC provider to be used to read/write data
METAMASK_OP_GOERLI_RPC_URL=""
# The OP Sepolia RPC provider to be used to read/write data
METAMASK_OP_SEPOLIA_RPC_URL=""
......@@ -49,14 +49,20 @@ services:
CI: ${CI}
DISPLAY: ${METAMASK_DISPLAY}
GRAFANA_ADMIN_PWD: ${GRAFANA_ADMIN_PWD}
PROMETHEUS_SERVER_URL: ${PROMETHEUS_SERVER_URL}
PROMETHEUS_PUSHGATEWAY_URL: ${PROMETHEUS_PUSHGATEWAY_URL}
METAMASK_RUN_HEADLESS: ${METAMASK_RUN_HEADLESS}
METRICS_READ_URL: ${METRICS_READ_URL}
METRICS_READ_USERNAME: ${METRICS_READ_USERNAME}
METRICS_READ_PASSWORD: ${METRICS_READ_PASSWORD}
METRICS_WRITE_URL: ${METRICS_WRITE_URL}
METRICS_WRITE_TOOL: ${METRICS_WRITE_TOOL}
METRIC_WRITE_SOURCE: ${METRIC_WRITE_SOURCE}
METRICS_WRITE_USERNAME: ${METRICS_WRITE_USERNAME}
METRICS_WRITE_PASSWORD: ${METRICS_WRITE_PASSWORD}
METAMASK_PLAYWRIGHT_RUN_HEADLESS: ${METAMASK_PLAYWRIGHT_RUN_HEADLESS}
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: ${METAMASK_SECRET_WORDS_OR_PRIVATEKEY}
METAMASK_NETWORK: ${METAMASK_NETWORK}
METAMASK_PASSWORD: ${METAMASK_PASSWORD}
METAMASK_DAPP_URL: ${METAMASK_DAPP_URL}
METAMASK_OP_GOERLI_RPC_URL: ${METAMASK_OP_GOERLI_RPC_URL}
METAMASK_OP_SEPOLIA_RPC_URL: ${METAMASK_OP_SEPOLIA_RPC_URL}
volumes:
- ${METAMASK_DISPLAY_VOLUME:-/path/in/container/if/no/env/set}
restart: "no"
......@@ -18,6 +18,7 @@
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 1,
"links": [],
"liveNow": false,
"panels": [
......@@ -122,7 +123,7 @@
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send",
"expr": "metamask_self_send_metric",
"fullMetaSearch": false,
"includeNullMetadata": true,
"instant": false,
......@@ -194,7 +195,7 @@
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_low"
"options": "metamask_self_send_fee_estimation_low_metric"
},
"properties": [
{
......@@ -213,7 +214,7 @@
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_medium"
"options": "metamask_self_send_fee_estimation_medium_metric"
},
"properties": [
{
......@@ -232,7 +233,7 @@
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_high"
"options": "metamask_self_send_fee_estimation_high_metric"
},
"properties": [
{
......@@ -244,7 +245,7 @@
{
"matcher": {
"id": "byName",
"options": "metamask_self_send_fee_estimation_actual"
"options": "metamask_self_send_fee_estimation_actual_metric"
},
"properties": [
{
......@@ -294,7 +295,7 @@
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_low",
"expr": "metamask_self_send_fee_estimation_low_metric",
"fullMetaSearch": false,
"includeNullMetadata": true,
"instant": false,
......@@ -310,7 +311,7 @@
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_medium",
"expr": "metamask_self_send_fee_estimation_medium_metric",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
......@@ -327,7 +328,7 @@
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_high",
"expr": "metamask_self_send_fee_estimation_high_metric",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
......@@ -344,7 +345,7 @@
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "metamask_self_send_fee_estimation_actual",
"expr": "metamask_self_send_fee_estimation_actual_metric",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
......@@ -367,7 +368,7 @@
"list": []
},
"time": {
"from": "now-3h",
"from": "now-12h",
"to": "now"
},
"timepicker": {},
......
......@@ -7,7 +7,7 @@ WORKDIR /app
# Update PATH
ENV PATH /app/node_modules/.bin:$PATH
RUN if [ "$METAMASK_RUN_HEADLESS" = "true" ]; then \
RUN if [ "$METAMASK_RUN_HEADLESS" != "false" ]; then \
apt-get update && \
apt-get install -y xvfb && \
rm -rf /var/lib/apt/lists/* ; \
......
......@@ -17,10 +17,16 @@ jobs:
run: docker-compose run metamask
env:
CI: ${{ secrets.CI }}
PROMETHEUS_SERVER_URL: ${{ secrets.PROMETHEUS_SERVER_URL }}
PROMETHEUS_PUSHGATEWAY_URL: ${{ secrets.PROMETHEUS_PUSHGATEWAY_URL }}
METRICS_READ_URL: ${{ secrets.METRICS_READ_URL }}
METRICS_READ_USERNAME: ${{ secrets.METRICS_READ_USERNAME }}
METRICS_READ_PASSWORD: ${{ secrets.METRICS_READ_PASSWORD }}
METRICS_WRITE_URL: ${{ secrets.METRICS_WRITE_URL }}
METRICS_WRITE_TOOL: ${{ secrets.METRICS_WRITE_TOOL }}
METRIC_WRITE_SOURCE: ${{ secrets.METRIC_WRITE_SOURCE }}
METRICS_WRITE_USERNAME: ${{ secrets.METRICS_WRITE_USERNAME }}
METRICS_WRITE_PASSWORD: ${{ secrets.METRICS_WRITE_PASSWORD }}
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: ${{ secrets.METAMASK_SECRET_WORDS_OR_PRIVATEKEY }}
METAMASK_NETWORK: ${{ secrets.METAMASK_NETWORK || 'goerli' }}
METAMASK_NETWORK: ${{ secrets.METAMASK_NETWORK || 'sepolia' }}
METAMASK_PASSWORD: ${{ secrets.METAMASK_PASSWORD || 'T3st_P@ssw0rd!' }}
METAMASK_DAPP_URL: ${{ secrets.METAMASK_DAPP_URL || 'http://localhost:9011' }}
METAMASK_OP_GOERLI_RPC_URL: ${{ secrets.METAMASK_OP_GOERLI_RPC_URL }}
METAMASK_OP_SEPOLIA_RPC_URL: ${{ secrets.METAMASK_OP_SEPOLIA_RPC_URL }}
......@@ -16,7 +16,7 @@ import {
const env = z
.object({
METAMASK_SECRET_WORDS_OR_PRIVATEKEY: z.string(),
METAMASK_OP_GOERLI_RPC_URL: z.string().url(),
METAMASK_OP_SEPOLIA_RPC_URL: z.string().url(),
METAMASK_DAPP_URL: z.string().url(),
})
.parse(process.env)
......@@ -30,6 +30,8 @@ const expectedSender = env.METAMASK_SECRET_WORDS_OR_PRIVATEKEY?.startsWith('0x')
).address.toLowerCase()
const expectedRecipient = expectedSender
const expectedCurrencySymbol = 'OPS'
let sharedPage: Page
let wasSuccessful: boolean
let handledFailure: boolean
......@@ -55,27 +57,26 @@ testWithSynpress('Setup wallet and dApp', async ({ page }) => {
console.log('Setting up wallet and dApp...')
sharedPage = page
await sharedPage.goto('http://localhost:9011')
console.log('Setup wallet and dApp')
})
testWithSynpress('Add OP Goerli network', async () => {
console.log('Adding OP Goerli network...')
const expectedChainId = '0x1a4'
testWithSynpress('Add OP Sepolia network', async () => {
console.log('Adding OP Sepolia network...')
const expectedChainId = '0xaa37dc'
await metamask.addNetwork({
name: 'op-goerli',
name: 'op-sepolia',
rpcUrls: {
default: {
http: [env.METAMASK_OP_GOERLI_RPC_URL],
http: [env.METAMASK_OP_SEPOLIA_RPC_URL],
},
},
id: '420',
id: '11155420',
nativeCurrency: {
symbol: 'OPG',
symbol: expectedCurrencySymbol,
},
blockExplorers: {
default: {
url: 'https://goerli-explorer.optimism.io',
url: 'https://optimism-sepolia.blockscout.com',
},
},
})
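A quick sanity check on the constants above (illustrative only, not part of the test): the hex chain ID and the decimal network id refer to the same chain.

```ts
// 0xaa37dc in decimal is 11155420, matching the id passed to addNetwork.
console.log(parseInt('0xaa37dc', 16) === 11155420) // -> true
```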
......@@ -87,7 +88,6 @@ testWithSynpress('Add OP Goerli network', async () => {
handledFailure = true
throw error
}
console.log('Added OP Goerli network')
})
test(`Connect wallet with ${expectedSender}`, async () => {
......@@ -102,7 +102,6 @@ test(`Connect wallet with ${expectedSender}`, async () => {
handledFailure = true
throw error
}
console.log(`Connected wallet with ${expectedSender}`)
})
test('Send an EIP-1559 transaction and verify success', async () => {
......@@ -127,6 +126,7 @@ test('Send an EIP-1559 transaction and verify success', async () => {
const notificationPage =
await synpressPlaywright.switchToMetamaskNotification()
console.log('Gathering transaction fee estimations...')
const lowFeeEstimate = await getFeeEstimateInGwei(
confirmPageElements.gasOptionLowButton,
'Low',
......@@ -146,6 +146,7 @@ test('Send an EIP-1559 transaction and verify success', async () => {
notificationPage
)
console.log('Sent transaction, waiting for confirmation...')
await metamask.confirmTransactionAndWaitForMining()
const txHash = await txHashPromise
......@@ -164,6 +165,7 @@ test('Send an EIP-1559 transaction and verify success', async () => {
// Metamask test dApp allows us access to the Metamask RPC provider via loading this URL.
// The RPC response will be populated onto the page that's loaded.
// More info here: https://github.com/MetaMask/test-dapp/tree/main#usage
console.log('Retrieving transaction receipt...')
await sharedPage.goto(
`${env.METAMASK_DAPP_URL}/request.html?method=eth_getTransactionReceipt&params=["${txHash}"]`
)
......@@ -179,7 +181,6 @@ test('Send an EIP-1559 transaction and verify success', async () => {
handledFailure = true
throw error
}
console.log('Sent an EIP-1559 transaction and verified success')
await setFeeEstimationGauge('low', lowFeeEstimate)
await setFeeEstimationGauge('medium', mediumFeeEstimate)
......@@ -192,7 +193,6 @@ const getFeeEstimateInGwei = async (
waitForText: 'Low' | 'Market' | 'Aggressive',
notificationPage: Page
) => {
const regexParseEtherValue = /(\d+\.\d+)\sOPG/
await synpressPlaywright.waitAndClick(
confirmPageElements.editGasFeeButton,
notificationPage
......@@ -203,6 +203,7 @@ const getFeeEstimateInGwei = async (
waitForText,
notificationPage
)
const regexParseEtherValue = /(\d+\.\d+)\s?OPS/
const feeValue = (
await synpressPlaywright.waitAndGetValue(
confirmPageElements.totalLabel,
......
......@@ -4,16 +4,63 @@ import { Gauge, Pushgateway, Registry } from 'prom-client'
const env = z
.object({
PROMETHEUS_SERVER_URL: z.string().url(),
PROMETHEUS_PUSHGATEWAY_URL: z.string().url(),
METRICS_READ_URL: z.string().url(),
METRICS_READ_USERNAME: z.string().optional(),
METRICS_READ_PASSWORD: z.string().optional(),
METRICS_WRITE_URL: z.string().url(),
METRICS_WRITE_TOOL: z.enum(['grafana', 'prometheus-pushgateway']),
METRIC_WRITE_SOURCE: z.string().optional(),
METRICS_WRITE_USERNAME: z.string().optional(),
METRICS_WRITE_PASSWORD: z.string().optional(),
})
.refine(
(data) => {
if (
(data.METRICS_READ_USERNAME && !data.METRICS_READ_PASSWORD) ||
(data.METRICS_READ_PASSWORD && !data.METRICS_READ_USERNAME)
) {
return false
}
if (
(data.METRICS_WRITE_USERNAME && !data.METRICS_WRITE_PASSWORD) ||
(data.METRICS_WRITE_PASSWORD && !data.METRICS_WRITE_USERNAME)
) {
return false
}
return true
},
{
message:
'Both username and password must be provided together for read or write metrics',
}
)
.refine(
(data) => {
if (
data.METRICS_WRITE_TOOL === 'grafana' &&
data.METRIC_WRITE_SOURCE === undefined
)
return false
return true
},
{
message:
'Writing to Grafana requires a source, please specify one using METRIC_WRITE_SOURCE env',
}
)
.parse(process.env)
const selfSendTransactionMetricName = 'metamask_self_send'
const feeEstimateLowMetricName = 'metamask_self_send_fee_estimation_low'
const feeEstimateMediumMetricName = 'metamask_self_send_fee_estimation_medium'
const feeEstimateHighMetricName = 'metamask_self_send_fee_estimation_high'
const feeEstimateActualMetricName = 'metamask_self_send_fee_estimation_actual'
const selfSendTransactionMetricName = 'metamask_self_send_metric'
const feeEstimateLowMetricName = 'metamask_self_send_fee_estimation_low_metric'
const feeEstimateMediumMetricName =
'metamask_self_send_fee_estimation_medium_metric'
const feeEstimateHighMetricName =
'metamask_self_send_fee_estimation_high_metric'
const feeEstimateActualMetricName =
'metamask_self_send_fee_estimation_actual_metric'
const selfSendRegistry = new Registry()
const feeEstimateLowRegistry = new Registry()
......@@ -24,38 +71,51 @@ const feeEstimateActualRegistry = new Registry()
const selfSendGauge = new Gauge({
name: selfSendTransactionMetricName,
help: 'A gauge signifying the number of transactions sent with Metamask',
registers: [selfSendRegistry]
registers: [selfSendRegistry],
})
const feeEstimateLowGauge = new Gauge({
name: feeEstimateLowMetricName,
help: 'A gauge signifying the latest fee estimation from Metamask for Low transaction speed',
registers: [feeEstimateLowRegistry]
registers: [feeEstimateLowRegistry],
})
const feeEstimateMediumGauge = new Gauge({
name: feeEstimateMediumMetricName,
help: 'A gauge signifying the latest fee estimation from Metamask for Medium transaction speed',
registers: [feeEstimateMediumRegistry]
registers: [feeEstimateMediumRegistry],
})
const feeEstimateHighGauge = new Gauge({
name: feeEstimateHighMetricName,
help: 'A gauge signifying the latest fee estimation from Metamask for High transaction speed',
registers: [feeEstimateHighRegistry]
registers: [feeEstimateHighRegistry],
})
const feeEstimateActualGauge = new Gauge({
name: feeEstimateActualMetricName,
help: 'A gauge signifying the latest actual transaction fee',
registers: [feeEstimateActualRegistry]
registers: [feeEstimateActualRegistry],
})
export const getSelfSendGaugeValue = async () => {
const prometheusMetricQuery = `${env.PROMETHEUS_SERVER_URL}/api/v1/query?query=${selfSendTransactionMetricName}`
const response = await fetch(prometheusMetricQuery)
const queryMetricsReadUrl = async (
query: string = selfSendTransactionMetricName
) => {
const metricsReadRequest = `${env.METRICS_READ_URL}?query=${query}`
const response = await fetch(metricsReadRequest, {
headers:
env.METRICS_READ_USERNAME === undefined
? undefined
: {
Authorization: `Bearer ${env.METRICS_READ_USERNAME}:${env.METRICS_READ_PASSWORD}`,
},
})
if (!response.ok) {
console.error(response.status)
console.error(response.statusText)
throw new Error(`Failed to fetch metric from: ${prometheusMetricQuery}`)
throw new Error(`Failed to fetch metric from: ${metricsReadRequest}`)
}
return response
}
export const getSelfSendGaugeValue = async () => {
const response = await queryMetricsReadUrl(selfSendTransactionMetricName)
// The following is an example of the expect response from prometheusMetricQuery
// for response.json().data.result[0]:
......@@ -92,7 +152,7 @@ export const getSelfSendGaugeValue = async () => {
error.message === "Cannot read properties of undefined (reading 'value')"
) {
console.warn(
`No data found for metric ${selfSendTransactionMetricName} in Prometheus`
`No data found for metric ${selfSendTransactionMetricName} in ${env.METRICS_READ_URL}`
)
return undefined
}
......@@ -101,12 +161,44 @@ export const getSelfSendGaugeValue = async () => {
}
}
const pushMetricsGrafana = (metricName: string, valueToSetTo: number) =>
pushMetricsWriteUrl(
`${metricName},source=${
env.METRIC_WRITE_SOURCE
} metric=${valueToSetTo}`
)
const pushMetricsPrometheusPushgateway = (registry: Registry) => {
const pushGateway = new Pushgateway(env.METRICS_WRITE_URL, undefined, registry)
return pushGateway.pushAdd({ jobName: 'ufm-metamask-metric-push' })
}
const pushMetricsWriteUrl = async (requestBody: string) => {
const response = await fetch(env.METRICS_WRITE_URL, {
method: 'POST',
headers:
env.METRICS_WRITE_USERNAME === undefined
? undefined
: {
Authorization: `Bearer ${env.METRICS_WRITE_USERNAME}:${env.METRICS_WRITE_PASSWORD}`,
},
body: requestBody,
})
if (!response.ok) {
console.error(response.status)
console.error(response.statusText)
throw new Error(`Failed to push metric to: ${env.METRICS_WRITE_URL}`)
}
return response
}
export const setSelfSendTxGauge = async (valueToSetTo: number) => {
console.log(`Setting ${selfSendTransactionMetricName} to ${valueToSetTo}...`)
selfSendGauge.set(valueToSetTo)
const pushGateway = new Pushgateway(env.PROMETHEUS_PUSHGATEWAY_URL, undefined, selfSendRegistry)
await pushGateway.pushAdd({ jobName: 'metamask_self_send_tx_count' })
env.METRICS_WRITE_TOOL === 'grafana'
? await pushMetricsGrafana(selfSendTransactionMetricName.replace('_metric', ''), valueToSetTo)
: await pushMetricsPrometheusPushgateway(selfSendRegistry)
}
export const incrementSelfSendTxGauge = async (isSuccess: boolean) => {
......@@ -125,36 +217,41 @@ export const incrementSelfSendTxGauge = async (isSuccess: boolean) => {
await setSelfSendTxGauge(newMetricValue)
}
export const setFeeEstimationGauge = async (txSpeed: 'low' | 'medium' | 'high' | 'actual', fee: number) => {
console.log(
txSpeed !== 'actual'
? `Setting Metamask fee estimation for ${txSpeed} to ${fee}...`
: `Setting actual transaction fee to ${fee}`
)
export const setFeeEstimationGauge = async (
txSpeed: 'low' | 'medium' | 'high' | 'actual',
fee: number
) => {
let metricNameGrafana: string
let prometheusRegistry: Registry
switch (txSpeed) {
case 'low':
feeEstimateLowGauge.set(fee)
metricNameGrafana = feeEstimateLowMetricName
prometheusRegistry = feeEstimateLowRegistry
break;
break
case 'medium':
feeEstimateMediumGauge.set(fee)
metricNameGrafana = feeEstimateMediumMetricName
prometheusRegistry = feeEstimateMediumRegistry
break;
break
case 'high':
feeEstimateHighGauge.set(fee)
metricNameGrafana = feeEstimateHighMetricName
prometheusRegistry = feeEstimateHighRegistry
break;
break
case 'actual':
feeEstimateActualGauge.set(fee)
metricNameGrafana = feeEstimateActualMetricName
prometheusRegistry = feeEstimateActualRegistry
break;
break
default:
throw new Error(`unsupported transaction speed given: ${txSpeed}`)
}
metricNameGrafana = metricNameGrafana.replace('_metric', '')
console.log(`Setting ${metricNameGrafana} to ${fee}...`)
const pushGateway = new Pushgateway(env.PROMETHEUS_PUSHGATEWAY_URL, undefined, prometheusRegistry)
await pushGateway.pushAdd({ jobName: `metamask_self_send_tx_fee_estimation_${txSpeed}` })
env.METRICS_WRITE_TOOL === 'grafana'
? await pushMetricsGrafana(metricNameGrafana, fee)
: await pushMetricsPrometheusPushgateway(prometheusRegistry)
}
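For context, a sketch of what each write path ends up sending, based on the helpers above (the source value is a placeholder): the `grafana` path posts a plain-text line with the `_metric` suffix stripped from the name, while the `prometheus-pushgateway` path delegates to prom-client's `Pushgateway.pushAdd`, which pushes the given registry's metrics to the gateway.

```ts
// Illustrative only: the request body the "grafana" branch would POST for a
// self-send count of 1, assuming METRIC_WRITE_SOURCE="op-ufm-metamask" (placeholder).
const exampleGrafanaBody = 'metamask_self_send,source=op-ufm-metamask metric=1'
console.log(exampleGrafanaBody)
```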