Commit df5d0632 authored by mergify[bot], committed by GitHub

Merge branch 'develop' into dependabot/npm_and_yarn/eslint-plugin-react-7.33.2

parents 9324e0c7 dc56c5e9
docker-compose.dev.yml docker-compose.dev.yml
.env .env
indexer /indexer
package api package api
import ( import (
"context"
"fmt" "fmt"
"net/http" "net/http"
"github.com/ethereum-optimism/optimism/indexer/api/routes" "github.com/ethereum-optimism/optimism/indexer/api/routes"
"github.com/ethereum-optimism/optimism/indexer/database" "github.com/ethereum-optimism/optimism/indexer/database"
"github.com/ethereum-optimism/optimism/op-service/httputil"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
) )
...@@ -13,25 +15,29 @@ import ( ...@@ -13,25 +15,29 @@ import (
const ethereumAddressRegex = `^0x[a-fA-F0-9]{40}$` const ethereumAddressRegex = `^0x[a-fA-F0-9]{40}$`
type Api struct { type Api struct {
log log.Logger
Router *chi.Mux Router *chi.Mux
} }
func NewApi(bv database.BridgeTransfersView, logger log.Logger) *Api { func NewApi(logger log.Logger, bv database.BridgeTransfersView) *Api {
logger.Info("Initializing API...")
r := chi.NewRouter() r := chi.NewRouter()
h := routes.NewRoutes(logger, bv, r) h := routes.NewRoutes(logger, bv, r)
api := &Api{Router: r}
r.Get("/healthz", h.HealthzHandler) r.Get("/healthz", h.HealthzHandler)
r.Get(fmt.Sprintf("/api/v0/deposits/{address:%s}", ethereumAddressRegex), h.L1DepositsHandler) r.Get(fmt.Sprintf("/api/v0/deposits/{address:%s}", ethereumAddressRegex), h.L1DepositsHandler)
r.Get(fmt.Sprintf("/api/v0/withdrawals/{address:%s}", ethereumAddressRegex), h.L2WithdrawalsHandler) r.Get(fmt.Sprintf("/api/v0/withdrawals/{address:%s}", ethereumAddressRegex), h.L2WithdrawalsHandler)
return &Api{log: logger, Router: r}
return api
} }
func (a *Api) Listen(port string) error { func (a *Api) Listen(ctx context.Context, port int) error {
return http.ListenAndServe(port, a.Router) a.log.Info("starting api server", "port", port)
server := http.Server{Addr: fmt.Sprintf(":%d", port), Handler: a.Router}
err := httputil.ListenAndServeContext(ctx, &server)
if err != nil {
a.log.Error("api server shutdown", "err", err)
} else {
a.log.Info("api server shutdown")
}
return err
} }
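For orientation, a minimal sketch of how a caller might wire up the reworked constructor and the context-aware Listen introduced in this hunk: the logger now comes first, and shutdown is driven by cancelling the context. The serve helper and its parameters are illustrative only, and the opio import path is assumed from its use elsewhere in this diff.

package main

import (
	"context"

	"github.com/ethereum-optimism/optimism/indexer/api"
	"github.com/ethereum-optimism/optimism/indexer/database"
	"github.com/ethereum-optimism/optimism/op-service/opio"
	"github.com/ethereum/go-ethereum/log"
)

// serve is a sketch only: it starts the API and shuts it down cleanly
// when an interrupt is received, mirroring the pattern runApi uses below.
func serve(logger log.Logger, bridgeView database.BridgeTransfersView, port int) error {
	ctx, cancel := context.WithCancel(context.Background())
	go func() {
		opio.BlockOnInterrupts() // returns once an interrupt signal is caught
		cancel()
	}()

	server := api.NewApi(logger, bridgeView) // note the new argument order
	return server.Listen(ctx, port)          // returns when ctx is cancelled or the server fails
}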
...@@ -77,7 +77,7 @@ func (mbv *MockBridgeTransfersView) L2BridgeWithdrawalsByAddress(address common. ...@@ -77,7 +77,7 @@ func (mbv *MockBridgeTransfersView) L2BridgeWithdrawalsByAddress(address common.
} }
func TestHealthz(t *testing.T) { func TestHealthz(t *testing.T) {
logger := testlog.Logger(t, log.LvlInfo) logger := testlog.Logger(t, log.LvlInfo)
api := NewApi(&MockBridgeTransfersView{}, logger) api := NewApi(logger, &MockBridgeTransfersView{})
request, err := http.NewRequest("GET", "/healthz", nil) request, err := http.NewRequest("GET", "/healthz", nil)
assert.Nil(t, err) assert.Nil(t, err)
...@@ -89,7 +89,7 @@ func TestHealthz(t *testing.T) { ...@@ -89,7 +89,7 @@ func TestHealthz(t *testing.T) {
func TestL1BridgeDepositsHandler(t *testing.T) { func TestL1BridgeDepositsHandler(t *testing.T) {
logger := testlog.Logger(t, log.LvlInfo) logger := testlog.Logger(t, log.LvlInfo)
api := NewApi(&MockBridgeTransfersView{}, logger) api := NewApi(logger, &MockBridgeTransfersView{})
request, err := http.NewRequest("GET", fmt.Sprintf("/api/v0/deposits/%s", mockAddress), nil) request, err := http.NewRequest("GET", fmt.Sprintf("/api/v0/deposits/%s", mockAddress), nil)
assert.Nil(t, err) assert.Nil(t, err)
...@@ -101,7 +101,7 @@ func TestL1BridgeDepositsHandler(t *testing.T) { ...@@ -101,7 +101,7 @@ func TestL1BridgeDepositsHandler(t *testing.T) {
func TestL2BridgeWithdrawalsByAddressHandler(t *testing.T) { func TestL2BridgeWithdrawalsByAddressHandler(t *testing.T) {
logger := testlog.Logger(t, log.LvlInfo) logger := testlog.Logger(t, log.LvlInfo)
api := NewApi(&MockBridgeTransfersView{}, logger) api := NewApi(logger, &MockBridgeTransfersView{})
request, err := http.NewRequest("GET", fmt.Sprintf("/api/v0/withdrawals/%s", mockAddress), nil) request, err := http.NewRequest("GET", fmt.Sprintf("/api/v0/withdrawals/%s", mockAddress), nil)
assert.Nil(t, err) assert.Nil(t, err)
......
package cli package main
import ( import (
"context" "context"
"fmt"
"strconv"
"github.com/ethereum-optimism/optimism/indexer" "github.com/ethereum-optimism/optimism/indexer"
"github.com/ethereum-optimism/optimism/indexer/api" "github.com/ethereum-optimism/optimism/indexer/api"
...@@ -16,26 +14,25 @@ import ( ...@@ -16,26 +14,25 @@ import (
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
) )
type Cli struct { var (
GitVersion string ConfigFlag = &cli.StringFlag{
GitCommit string Name: "config",
GitDate string Value: "./indexer.toml",
app *cli.App Aliases: []string{"c"},
Flags []cli.Flag Usage: "path to config file",
} EnvVars: []string{"INDEXER_CONFIG"},
}
)
func runIndexer(ctx *cli.Context) error { func runIndexer(ctx *cli.Context) error {
logger := log.NewLogger(log.ReadCLIConfig(ctx)) logger := log.NewLogger(log.ReadCLIConfig(ctx))
cfg, err := config.LoadConfig(logger, ctx.String(ConfigFlag.Name))
configPath := ctx.String(ConfigFlag.Name)
cfg, err := config.LoadConfig(logger, configPath)
if err != nil { if err != nil {
logger.Error("failed to load config", "err", err) logger.Error("failed to load config", "err", err)
return err return err
} }
db, err := database.NewDB(cfg.DB) db, err := database.NewDB(cfg.DB)
if err != nil { if err != nil {
return err return err
} }
...@@ -48,6 +45,7 @@ func runIndexer(ctx *cli.Context) error { ...@@ -48,6 +45,7 @@ func runIndexer(ctx *cli.Context) error {
indexerCtx, indexerCancel := context.WithCancel(context.Background()) indexerCtx, indexerCancel := context.WithCancel(context.Background())
go func() { go func() {
opio.BlockOnInterrupts() opio.BlockOnInterrupts()
logger.Error("caught interrupt, shutting down...")
indexerCancel() indexerCancel()
}() }()
...@@ -56,47 +54,35 @@ func runIndexer(ctx *cli.Context) error { ...@@ -56,47 +54,35 @@ func runIndexer(ctx *cli.Context) error {
func runApi(ctx *cli.Context) error { func runApi(ctx *cli.Context) error {
logger := log.NewLogger(log.ReadCLIConfig(ctx)) logger := log.NewLogger(log.ReadCLIConfig(ctx))
cfg, err := config.LoadConfig(logger, ctx.String(ConfigFlag.Name))
configPath := ctx.String(ConfigFlag.Name)
cfg, err := config.LoadConfig(logger, configPath)
if err != nil { if err != nil {
logger.Error("failed to load config", "err", err) logger.Error("failed to load config", "err", err)
return err return err
} }
db, err := database.NewDB(cfg.DB) db, err := database.NewDB(cfg.DB)
if err != nil { if err != nil {
logger.Crit("Failed to connect to database", "err", err) logger.Crit("Failed to connect to database", "err", err)
} }
server := api.NewApi(db.BridgeTransfers, logger) apiCtx, apiCancel := context.WithCancel(context.Background())
api := api.NewApi(logger, db.BridgeTransfers)
return server.Listen(strconv.Itoa(cfg.API.Port)) go func() {
} opio.BlockOnInterrupts()
logger.Error("caught interrupt, shutting down...")
var ( apiCancel()
ConfigFlag = &cli.StringFlag{ }()
Name: "config",
Value: "./indexer.toml",
Aliases: []string{"c"},
Usage: "path to config file",
EnvVars: []string{"INDEXER_CONFIG"},
}
)
// make a instance method on Cli called Run that runs cli return api.Listen(apiCtx, cfg.API.Port)
// and returns an error
func (c *Cli) Run(args []string) error {
return c.app.Run(args)
} }
func NewCli(GitVersion string, GitCommit string, GitDate string) *Cli { func newCli(GitCommit string, GitDate string) *cli.App {
flags := []cli.Flag{ConfigFlag} flags := []cli.Flag{ConfigFlag}
flags = append(flags, log.CLIFlags("INDEXER")...) flags = append(flags, log.CLIFlags("INDEXER")...)
app := &cli.App{ return &cli.App{
Version: fmt.Sprintf("%s-%s", GitVersion, params.VersionWithCommit(GitCommit, GitDate)), Version: params.VersionWithCommit(GitCommit, GitDate),
Description: "An indexer of all optimism events with a serving api layer", Description: "An indexer of all optimism events with a serving api layer",
EnableBashCompletion: true,
Commands: []*cli.Command{ Commands: []*cli.Command{
{ {
Name: "api", Name: "api",
...@@ -110,11 +96,14 @@ func NewCli(GitVersion string, GitCommit string, GitDate string) *Cli { ...@@ -110,11 +96,14 @@ func NewCli(GitVersion string, GitCommit string, GitDate string) *Cli {
Description: "Runs the indexing service", Description: "Runs the indexing service",
Action: runIndexer, Action: runIndexer,
}, },
{
Name: "version",
Description: "print version",
Action: func(ctx *cli.Context) error {
cli.ShowVersion(ctx)
return nil
},
},
}, },
} }
return &Cli{
app: app,
Flags: flags,
}
} }
...@@ -3,20 +3,17 @@ package main ...@@ -3,20 +3,17 @@ package main
import ( import (
"os" "os"
"github.com/ethereum-optimism/optimism/indexer/cli"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
) )
var ( var (
GitVersion = "" GitCommit = ""
GitCommit = "" GitDate = ""
GitDate = ""
) )
func main() { func main() {
app := cli.NewCli(GitVersion, GitCommit, GitDate) app := newCli(GitCommit, GitDate)
if err := app.Run(os.Args); err != nil { if err := app.Run(os.Args); err != nil {
log.Crit("Application failed", "message", err) log.Crit("application failed", "err", err)
} }
} }
...@@ -99,20 +99,18 @@ type MetricsConfig struct { ...@@ -99,20 +99,18 @@ type MetricsConfig struct {
// LoadConfig loads the `indexer.toml` config file from a given path // LoadConfig loads the `indexer.toml` config file from a given path
func LoadConfig(logger geth_log.Logger, path string) (Config, error) { func LoadConfig(logger geth_log.Logger, path string) (Config, error) {
logger.Info("Loading config file", "path", path) logger.Debug("loading config", "path", path)
var conf Config
var conf Config
data, err := os.ReadFile(path) data, err := os.ReadFile(path)
if err != nil { if err != nil {
return conf, err return conf, err
} }
data = []byte(os.ExpandEnv(string(data))) data = []byte(os.ExpandEnv(string(data)))
logger.Debug("parsed config file", "data", string(data))
logger.Debug("Decoding config file", "data", string(data))
if _, err := toml.Decode(string(data), &conf); err != nil { if _, err := toml.Decode(string(data), &conf); err != nil {
logger.Info("Failed to decode config file", "message", err) logger.Info("failed to decode config file", "err", err)
return conf, err return conf, err
} }
...@@ -125,7 +123,6 @@ func LoadConfig(logger geth_log.Logger, path string) (Config, error) { ...@@ -125,7 +123,6 @@ func LoadConfig(logger geth_log.Logger, path string) (Config, error) {
} }
} }
logger.Debug("Loaded config file", conf) logger.Info("loaded config")
return conf, nil return conf, nil
} }
...@@ -4,46 +4,134 @@ ...@@ -4,46 +4,134 @@
package safe package safe
import ( import (
"bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"math/big" "math/big"
"strings"
"golang.org/x/exp/maps"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
) )
// BatchFile represents a Safe tx-builder transaction. // Batch represents a Safe tx-builder transaction.
type BatchFile struct { type Batch struct {
Version string `json:"version"` Version string `json:"version"`
ChainID *big.Int `json:"chainId"` ChainID *big.Int `json:"chainId"`
CreatedAt uint64 `json:"createdAt"` CreatedAt uint64 `json:"createdAt"`
Meta BatchFileMeta `json:"meta"` Meta BatchMeta `json:"meta"`
Transactions []BatchTransaction `json:"transactions"` Transactions []BatchTransaction `json:"transactions"`
} }
// AddCall will add a call to the batch. After a series of calls are
// added to the batch, it can be serialized to JSON.
func (b *Batch) AddCall(to common.Address, value *big.Int, sig string, args []any, iface abi.ABI) error {
// Attempt to pull out the signature from the top level methods.
// The abi package uses normalization that we do not want to be
// coupled to, so attempt to search for the raw name if the top
// level name is not found to handle overloading more gracefully.
method, ok := iface.Methods[sig]
if !ok {
for _, m := range iface.Methods {
if m.RawName == sig || m.Sig == sig {
method = m
ok = true
}
}
}
if !ok {
keys := maps.Keys(iface.Methods)
methods := strings.Join(keys, ",")
return fmt.Errorf("%s not found in abi, options are %s", sig, methods)
}
if len(args) != len(method.Inputs) {
return fmt.Errorf("requires %d inputs but got %d for %s", len(method.Inputs), len(args), method.RawName)
}
contractMethod := ContractMethod{
Name: method.RawName,
Payable: method.Payable,
}
inputValues := make(map[string]string)
contractInputs := make([]ContractInput, 0)
for i, input := range method.Inputs {
contractInput, err := createContractInput(input, contractInputs)
if err != nil {
return err
}
contractMethod.Inputs = append(contractMethod.Inputs, contractInput...)
str, err := stringifyArg(args[i])
if err != nil {
return err
}
inputValues[input.Name] = str
}
encoded, err := method.Inputs.PackValues(args)
if err != nil {
return err
}
data := make([]byte, len(method.ID)+len(encoded))
copy(data, method.ID)
copy(data[len(method.ID):], encoded)
batchTransaction := BatchTransaction{
To: to,
Value: value,
Method: contractMethod,
Data: data,
InputValues: inputValues,
}
b.Transactions = append(b.Transactions, batchTransaction)
return nil
}
// Check will check the batch for errors
func (b *Batch) Check() error {
for _, tx := range b.Transactions {
if err := tx.Check(); err != nil {
return err
}
}
return nil
}
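To make the intended flow concrete, here is a minimal sketch of adding a call and then serializing the batch, as the AddCall comment describes. The one-method transfer ABI string and the exampleBatch helper are made up for illustration; the real tests further down use testdata/portal-abi.json.

package safe

import (
	"encoding/json"
	"math/big"
	"strings"

	"github.com/ethereum/go-ethereum/accounts/abi"
	"github.com/ethereum/go-ethereum/common"
)

// Illustrative only: exampleABI is a made-up one-method contract ABI and
// exampleBatch adds a single call before serializing to tx-builder JSON.
const exampleABI = `[{"type":"function","name":"transfer","stateMutability":"nonpayable",
	"inputs":[{"name":"to","type":"address"},{"name":"amount","type":"uint256"}],"outputs":[]}]`

func exampleBatch() ([]byte, error) {
	iface, err := abi.JSON(strings.NewReader(exampleABI))
	if err != nil {
		return nil, err
	}

	batch := new(Batch)
	to := common.HexToAddress("0x0000000000000000000000000000000000000042")
	args := []any{common.Address{19: 0x01}, big.NewInt(1)}

	// AddCall resolves "transfer" against the ABI, stringifies the arguments
	// for the tx-builder UI, and packs the raw calldata into Data.
	if err := batch.AddCall(to, big.NewInt(0), "transfer", args, iface); err != nil {
		return nil, err
	}
	if err := batch.Check(); err != nil {
		return nil, err
	}
	return json.Marshal(batch)
}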
// batchFileMarshaling is a helper type used for JSON marshaling. // batchMarshaling is a helper type used for JSON marshaling.
type batchFileMarshaling struct { type batchMarshaling struct {
Version string `json:"version"` Version string `json:"version"`
ChainID string `json:"chainId"` ChainID string `json:"chainId"`
CreatedAt uint64 `json:"createdAt"` CreatedAt uint64 `json:"createdAt"`
Meta BatchFileMeta `json:"meta"` Meta BatchMeta `json:"meta"`
Transactions []BatchTransaction `json:"transactions"` Transactions []BatchTransaction `json:"transactions"`
} }
// MarshalJSON will marshal a BatchFile to JSON. // MarshalJSON will marshal a Batch to JSON.
func (b *BatchFile) MarshalJSON() ([]byte, error) { func (b *Batch) MarshalJSON() ([]byte, error) {
return json.Marshal(batchFileMarshaling{ batch := batchMarshaling{
Version: b.Version, Version: b.Version,
ChainID: b.ChainID.String(),
CreatedAt: b.CreatedAt, CreatedAt: b.CreatedAt,
Meta: b.Meta, Meta: b.Meta,
Transactions: b.Transactions, Transactions: b.Transactions,
}) }
if b.ChainID != nil {
batch.ChainID = b.ChainID.String()
}
return json.Marshal(batch)
} }
// UnmarshalJSON will unmarshal a BatchFile from JSON. // UnmarshalJSON will unmarshal a Batch from JSON.
func (b *BatchFile) UnmarshalJSON(data []byte) error { func (b *Batch) UnmarshalJSON(data []byte) error {
var bf batchFileMarshaling var bf batchMarshaling
if err := json.Unmarshal(data, &bf); err != nil { if err := json.Unmarshal(data, &bf); err != nil {
return err return err
} }
...@@ -59,9 +147,9 @@ func (b *BatchFile) UnmarshalJSON(data []byte) error { ...@@ -59,9 +147,9 @@ func (b *BatchFile) UnmarshalJSON(data []byte) error {
return nil return nil
} }
// BatchFileMeta contains metadata about a BatchFile. Not all // BatchMeta contains metadata about a Batch. Not all
// of the fields are required. // of the fields are required.
type BatchFileMeta struct { type BatchMeta struct {
TxBuilderVersion string `json:"txBuilderVersion,omitempty"` TxBuilderVersion string `json:"txBuilderVersion,omitempty"`
Checksum string `json:"checksum,omitempty"` Checksum string `json:"checksum,omitempty"`
CreatedFromSafeAddress string `json:"createdFromSafeAddress"` CreatedFromSafeAddress string `json:"createdFromSafeAddress"`
...@@ -79,6 +167,81 @@ type BatchTransaction struct { ...@@ -79,6 +167,81 @@ type BatchTransaction struct {
InputValues map[string]string `json:"contractInputsValues"` InputValues map[string]string `json:"contractInputsValues"`
} }
// Check will check the batch transaction for errors.
// An error is defined by:
// - incorrectly encoded calldata
// - mismatch in number of arguments
// It does not currently work on structs and will return no error if a "tuple"
// is used as an argument; a generic way to work with structs is still needed.
func (bt *BatchTransaction) Check() error {
if len(bt.Method.Inputs) != len(bt.InputValues) {
return fmt.Errorf("expected %d inputs but got %d", len(bt.Method.Inputs), len(bt.InputValues))
}
if len(bt.Data) > 0 && bt.Method.Name != "fallback" {
if len(bt.Data) < 4 {
return fmt.Errorf("must have at least 4 bytes of calldata, got %d", len(bt.Data))
}
sig := bt.Signature()
selector := crypto.Keccak256([]byte(sig))[0:4]
if !bytes.Equal(bt.Data[0:4], selector) {
return fmt.Errorf("data does not match signature")
}
// Check the calldata
values := make([]any, len(bt.Method.Inputs))
for i, input := range bt.Method.Inputs {
value, ok := bt.InputValues[input.Name]
if !ok {
return fmt.Errorf("missing input %s", input.Name)
}
// Need to figure out a better way to handle tuples generically
if input.Type == "tuple" {
return nil
}
arg, err := unstringifyArg(value, input.Type)
if err != nil {
return err
}
values[i] = arg
}
calldata, err := bt.Arguments().PackValues(values)
if err != nil {
return err
}
if !bytes.Equal(bt.Data[4:], calldata) {
return fmt.Errorf("calldata does not match inputs, expected %s, got %s", hexutil.Encode(bt.Data[4:]), hexutil.Encode(calldata))
}
}
return nil
}
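The calldata check above recomputes the 4-byte function selector, i.e. the first four bytes of keccak256 over the canonical signature, and compares it with Data[0:4]. A quick sketch of that relationship; the value it prints is the same constant used by the "good-calldata" case in TestBatchCheck further down.

package main

import (
	"fmt"

	"github.com/ethereum/go-ethereum/crypto"
)

func main() {
	// Sketch only: Signature() for a method named "foo" with no inputs is "foo()",
	// and its selector is the first four bytes of keccak256("foo()").
	selector := crypto.Keccak256([]byte("foo()"))[:4]
	fmt.Printf("%x\n", selector) // prints c2985578, the Data used in the "good-calldata" test case
}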
// Signature returns the function signature of the batch transaction.
func (bt *BatchTransaction) Signature() string {
types := make([]string, len(bt.Method.Inputs))
for i, input := range bt.Method.Inputs {
types[i] = buildFunctionSignature(input)
}
return fmt.Sprintf("%s(%s)", bt.Method.Name, strings.Join(types, ","))
}
func (bt *BatchTransaction) Arguments() abi.Arguments {
arguments := make(abi.Arguments, len(bt.Method.Inputs))
for i, input := range bt.Method.Inputs {
serialized, err := json.Marshal(input)
if err != nil {
panic(err)
}
var arg abi.Argument
if err := json.Unmarshal(serialized, &arg); err != nil {
panic(err)
}
arguments[i] = arg
}
return arguments
}
// UnmarshalJSON will unmarshal a BatchTransaction from JSON. // UnmarshalJSON will unmarshal a BatchTransaction from JSON.
func (b *BatchTransaction) UnmarshalJSON(data []byte) error { func (b *BatchTransaction) UnmarshalJSON(data []byte) error {
var bt batchTransactionMarshaling var bt batchTransactionMarshaling
...@@ -87,6 +250,9 @@ func (b *BatchTransaction) UnmarshalJSON(data []byte) error { ...@@ -87,6 +250,9 @@ func (b *BatchTransaction) UnmarshalJSON(data []byte) error {
} }
b.To = common.HexToAddress(bt.To) b.To = common.HexToAddress(bt.To)
b.Value = new(big.Int).SetUint64(bt.Value) b.Value = new(big.Int).SetUint64(bt.Value)
if bt.Data != nil {
b.Data = common.CopyBytes(*bt.Data)
}
b.Method = bt.Method b.Method = bt.Method
b.InputValues = bt.InputValues b.InputValues = bt.InputValues
return nil return nil
...@@ -101,8 +267,8 @@ func (b *BatchTransaction) MarshalJSON() ([]byte, error) { ...@@ -101,8 +267,8 @@ func (b *BatchTransaction) MarshalJSON() ([]byte, error) {
InputValues: b.InputValues, InputValues: b.InputValues,
} }
if len(b.Data) != 0 { if len(b.Data) != 0 {
hex := hexutil.Encode(b.Data) data := hexutil.Bytes(b.Data)
batch.Data = &hex batch.Data = &data
} }
return json.Marshal(batch) return json.Marshal(batch)
} }
...@@ -111,7 +277,7 @@ func (b *BatchTransaction) MarshalJSON() ([]byte, error) { ...@@ -111,7 +277,7 @@ func (b *BatchTransaction) MarshalJSON() ([]byte, error) {
type batchTransactionMarshaling struct { type batchTransactionMarshaling struct {
To string `json:"to"` To string `json:"to"`
Value uint64 `json:"value,string"` Value uint64 `json:"value,string"`
Data *string `json:"data"` Data *hexutil.Bytes `json:"data"`
Method ContractMethod `json:"contractMethod"` Method ContractMethod `json:"contractMethod"`
InputValues map[string]string `json:"contractInputsValues"` InputValues map[string]string `json:"contractInputsValues"`
} }
......
package safe
import (
"bytes"
"encoding/json"
"os"
"testing"
"github.com/stretchr/testify/require"
)
func TestBatchFileJSONPrepareBedrock(t *testing.T) {
testBatchFileJSON(t, "testdata/batch-prepare-bedrock.json")
}
func TestBatchFileJSONL2OO(t *testing.T) {
testBatchFileJSON(t, "testdata/l2-output-oracle.json")
}
func testBatchFileJSON(t *testing.T, path string) {
b, err := os.ReadFile(path)
require.NoError(t, err)
dec := json.NewDecoder(bytes.NewReader(b))
decoded := new(BatchFile)
require.NoError(t, dec.Decode(decoded))
data, err := json.Marshal(decoded)
require.NoError(t, err)
require.JSONEq(t, string(b), string(data))
}
package safe
import (
"fmt"
"math/big"
"reflect"
"strconv"
"strings"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
)
// stringifyArg converts a Go type to a string that is representable by ABI.
// To do so, this function must be recursive to handle nested tuples.
func stringifyArg(argument any) (string, error) {
switch arg := argument.(type) {
case common.Address:
return arg.String(), nil
case *common.Address:
return arg.String(), nil
case *big.Int:
return arg.String(), nil
case big.Int:
return arg.String(), nil
case bool:
if arg {
return "true", nil
}
return "false", nil
case int64:
return strconv.FormatInt(arg, 10), nil
case int32:
return strconv.FormatInt(int64(arg), 10), nil
case int16:
return strconv.FormatInt(int64(arg), 10), nil
case int8:
return strconv.FormatInt(int64(arg), 10), nil
case int:
return strconv.FormatInt(int64(arg), 10), nil
case uint64:
return strconv.FormatUint(uint64(arg), 10), nil
case uint32:
return strconv.FormatUint(uint64(arg), 10), nil
case uint16:
return strconv.FormatUint(uint64(arg), 10), nil
case uint8:
return strconv.FormatUint(uint64(arg), 10), nil
case uint:
return strconv.FormatUint(uint64(arg), 10), nil
case []byte:
return hexutil.Encode(arg), nil
case []any:
ret := make([]string, len(arg))
for i, v := range arg {
str, err := stringifyArg(v)
if err != nil {
return "", err
}
ret[i] = str
}
return "[" + strings.Join(ret, ",") + "]", nil
default:
typ := reflect.TypeOf(argument)
if typ.Kind() == reflect.Ptr {
typ = typ.Elem()
}
if typ.Kind() == reflect.Struct {
v := reflect.ValueOf(argument)
numField := v.NumField()
ret := make([]string, numField)
for i := 0; i < numField; i++ {
val := v.Field(i).Interface()
str, err := stringifyArg(val)
if err != nil {
return "", err
}
ret[i] = str
}
return "[" + strings.Join(ret, ",") + "]", nil
}
return "", fmt.Errorf("unknown type as argument: %T", arg)
}
}
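For struct arguments, the reflection branch above flattens the fields into a bracketed, comma-separated list; that is the shape of the _tx entry in the finalize-withdrawal fixture later in this diff. A sketch, placed inside package safe because stringifyArg is unexported; the exampleWithdrawal type is only a stand-in mirroring bindings.TypesWithdrawalTransaction.

package safe

import (
	"math/big"

	"github.com/ethereum/go-ethereum/common"
)

// exampleWithdrawal is illustrative only and mirrors the field layout of
// bindings.TypesWithdrawalTransaction.
type exampleWithdrawal struct {
	Nonce    *big.Int
	Sender   common.Address
	Target   common.Address
	Value    *big.Int
	GasLimit *big.Int
	Data     []byte
}

func exampleStringify() (string, error) {
	w := exampleWithdrawal{
		Nonce:    big.NewInt(0),
		Sender:   common.Address{19: 0x01},
		Target:   common.Address{19: 0x02},
		Value:    big.NewInt(1),
		GasLimit: big.NewInt(2),
		Data:     []byte{},
	}
	// Yields "[0,0x0000000000000000000000000000000000000001,0x0000000000000000000000000000000000000002,1,2,0x]",
	// the same shape as the _tx value in contractInputsValues.
	return stringifyArg(w)
}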
// unstringifyArg converts a string to a Go type.
func unstringifyArg(arg string, typ string) (any, error) {
switch typ {
case "address":
return common.HexToAddress(arg), nil
case "bool":
return strconv.ParseBool(arg)
case "uint8":
val, err := strconv.ParseUint(arg, 10, 8)
return uint8(val), err
case "uint16":
val, err := strconv.ParseUint(arg, 10, 16)
return uint16(val), err
case "uint32":
val, err := strconv.ParseUint(arg, 10, 32)
return uint32(val), err
case "uint64":
val, err := strconv.ParseUint(arg, 10, 64)
return val, err
case "int8":
val, err := strconv.ParseInt(arg, 10, 8)
return val, err
case "int16":
val, err := strconv.ParseInt(arg, 10, 16)
return val, err
case "int32":
val, err := strconv.ParseInt(arg, 10, 32)
return val, err
case "int64":
val, err := strconv.ParseInt(arg, 10, 64)
return val, err
case "uint256", "int256":
val, ok := new(big.Int).SetString(arg, 10)
if !ok {
return nil, fmt.Errorf("failed to parse %s as big.Int", arg)
}
return val, nil
case "string":
return arg, nil
case "bytes":
return hexutil.Decode(arg)
default:
return nil, fmt.Errorf("unknown type: %s", typ)
}
}
// createContractInput converts an abi.Argument to one or more ContractInputs.
func createContractInput(input abi.Argument, inputs []ContractInput) ([]ContractInput, error) {
inputType, err := stringifyType(input.Type)
if err != nil {
return nil, err
}
// TODO: could probably do better than string comparison?
internalType := input.Type.String()
if inputType == "tuple" {
internalType = input.Type.TupleRawName
}
components := make([]ContractInput, 0)
for i, elem := range input.Type.TupleElems {
e := *elem
arg := abi.Argument{
Name: input.Type.TupleRawNames[i],
Type: e,
}
component, err := createContractInput(arg, inputs)
if err != nil {
return nil, err
}
components = append(components, component...)
}
contractInput := ContractInput{
InternalType: internalType,
Name: input.Name,
Type: inputType,
Components: components,
}
inputs = append(inputs, contractInput)
return inputs, nil
}
// stringifyType turns an abi.Type into a string
func stringifyType(t abi.Type) (string, error) {
switch t.T {
case abi.TupleTy:
return "tuple", nil
case abi.BoolTy:
return t.String(), nil
case abi.AddressTy:
return t.String(), nil
case abi.UintTy:
return t.String(), nil
case abi.IntTy:
return t.String(), nil
case abi.StringTy:
return t.String(), nil
case abi.BytesTy:
return t.String(), nil
default:
return "", fmt.Errorf("unknown type: %d", t.T)
}
}
// buildFunctionSignature builds a function signature from a ContractInput.
// It is recursive to handle tuples.
func buildFunctionSignature(input ContractInput) string {
if input.Type == "tuple" {
types := make([]string, len(input.Components))
for i, component := range input.Components {
types[i] = buildFunctionSignature(component)
}
return fmt.Sprintf("(%s)", strings.Join(types, ","))
}
return input.InternalType
}
package safe
import (
"bytes"
"encoding/json"
"errors"
"math/big"
"os"
"testing"
"github.com/ethereum-optimism/optimism/op-bindings/bindings"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/require"
)
func TestBatchJSONPrepareBedrock(t *testing.T) {
testBatchJSON(t, "testdata/batch-prepare-bedrock.json")
}
func TestBatchJSONL2OO(t *testing.T) {
testBatchJSON(t, "testdata/l2-output-oracle.json")
}
func testBatchJSON(t *testing.T, path string) {
b, err := os.ReadFile(path)
require.NoError(t, err)
dec := json.NewDecoder(bytes.NewReader(b))
decoded := new(Batch)
require.NoError(t, dec.Decode(decoded))
data, err := json.Marshal(decoded)
require.NoError(t, err)
require.JSONEq(t, string(b), string(data))
}
// TestBatchAddCallFinalizeWithdrawalTransaction ensures that structs can be serialized correctly.
func TestBatchAddCallFinalizeWithdrawalTransaction(t *testing.T) {
file, err := os.ReadFile("testdata/portal-abi.json")
require.NoError(t, err)
portalABI, err := abi.JSON(bytes.NewReader(file))
require.NoError(t, err)
sig := "finalizeWithdrawalTransaction"
argument := []any{
bindings.TypesWithdrawalTransaction{
Nonce: big.NewInt(0),
Sender: common.Address{19: 0x01},
Target: common.Address{19: 0x02},
Value: big.NewInt(1),
GasLimit: big.NewInt(2),
Data: []byte{},
},
}
batch := new(Batch)
to := common.Address{19: 0x01}
value := big.NewInt(222)
require.NoError(t, batch.AddCall(to, value, sig, argument, portalABI))
require.NoError(t, batch.Check())
require.Equal(t, batch.Transactions[0].Signature(), "finalizeWithdrawalTransaction((uint256,address,address,uint256,uint256,bytes))")
expected, err := os.ReadFile("testdata/finalize-withdrawal-tx.json")
require.NoError(t, err)
serialized, err := json.Marshal(batch)
require.NoError(t, err)
require.JSONEq(t, string(expected), string(serialized))
}
// TestBatchAddCallDespositTransaction ensures that simple calls can be serialized correctly.
func TestBatchAddCallDespositTransaction(t *testing.T) {
file, err := os.ReadFile("testdata/portal-abi.json")
require.NoError(t, err)
portalABI, err := abi.JSON(bytes.NewReader(file))
require.NoError(t, err)
batch := new(Batch)
to := common.Address{19: 0x01}
value := big.NewInt(222)
sig := "depositTransaction"
argument := []any{
common.Address{01},
big.NewInt(2),
uint64(100),
false,
[]byte{},
}
require.NoError(t, batch.AddCall(to, value, sig, argument, portalABI))
require.NoError(t, batch.Check())
require.Equal(t, batch.Transactions[0].Signature(), "depositTransaction(address,uint256,uint64,bool,bytes)")
expected, err := os.ReadFile("testdata/deposit-tx.json")
require.NoError(t, err)
serialized, err := json.Marshal(batch)
require.NoError(t, err)
require.JSONEq(t, string(expected), string(serialized))
}
// TestBatchCheck checks for the various failure cases of Batch.Check
// as well as a simple check for a valid batch.
func TestBatchCheck(t *testing.T) {
cases := []struct {
name string
bt BatchTransaction
err error
}{
{
name: "bad-input-count",
bt: BatchTransaction{
Method: ContractMethod{},
InputValues: map[string]string{
"foo": "bar",
},
},
err: errors.New("expected 0 inputs but got 1"),
},
{
name: "bad-calldata-too-small",
bt: BatchTransaction{
Data: []byte{0x01},
},
err: errors.New("must have at least 4 bytes of calldata, got 1"),
},
{
name: "bad-calldata-mismatch",
bt: BatchTransaction{
Data: []byte{0x01, 0x02, 0x03, 0x04},
Method: ContractMethod{
Name: "foo",
},
},
err: errors.New("data does not match signature"),
},
{
name: "good-calldata",
bt: BatchTransaction{
Data: []byte{0xc2, 0x98, 0x55, 0x78},
Method: ContractMethod{
Name: "foo",
},
},
err: nil,
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
require.Equal(t, tc.err, tc.bt.Check())
})
}
}
{
"version": "",
"chainId": "",
"createdAt": 0,
"meta": {
"createdFromSafeAddress": "",
"createdFromOwnerAddress": "",
"name": "",
"description": ""
},
"transactions": [
{
"to": "0x0000000000000000000000000000000000000001",
"value": "222",
"data": "0xe9e05c42000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000",
"contractMethod": {
"inputs": [
{
"internalType": "address",
"name": "_to",
"type": "address"
},
{
"internalType": "uint256",
"name": "_value",
"type": "uint256"
},
{
"internalType": "uint64",
"name": "_gasLimit",
"type": "uint64"
},
{
"internalType": "bool",
"name": "_isCreation",
"type": "bool"
},
{
"internalType": "bytes",
"name": "_data",
"type": "bytes"
}
],
"name": "depositTransaction",
"payable": false
},
"contractInputsValues": {
"_data": "0x",
"_gasLimit": "100",
"_isCreation": "false",
"_to": "0x0100000000000000000000000000000000000000",
"_value": "2"
}
}
]
}
{
"version": "",
"chainId": "",
"createdAt": 0,
"meta": {
"createdFromSafeAddress": "",
"createdFromOwnerAddress": "",
"name": "",
"description": ""
},
"transactions": [
{
"to": "0x0000000000000000000000000000000000000001",
"value": "222",
"data": "0x8c3152e900000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000000",
"contractMethod": {
"inputs": [
{
"internalType": "TypesWithdrawalTransaction",
"name": "_tx",
"type": "tuple",
"components": [
{
"internalType": "uint256",
"name": "nonce",
"type": "uint256"
},
{
"internalType": "address",
"name": "sender",
"type": "address"
},
{
"internalType": "address",
"name": "target",
"type": "address"
},
{
"internalType": "uint256",
"name": "value",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "gasLimit",
"type": "uint256"
},
{
"internalType": "bytes",
"name": "data",
"type": "bytes"
}
]
}
],
"name": "finalizeWithdrawalTransaction",
"payable": false
},
"contractInputsValues": {
"_tx": "[0,0x0000000000000000000000000000000000000001,0x0000000000000000000000000000000000000002,1,2,0x]"
}
}
]
}
package fault package fault
import ( import (
"errors"
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/hashicorp/go-multierror"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
) )
const gameDirPrefix = "game-" const gameDirPrefix = "game-"
// diskManager coordinates // diskManager coordinates the storage of game data on disk.
type diskManager struct { type diskManager struct {
datadir string datadir string
} }
...@@ -31,7 +31,7 @@ func (d *diskManager) RemoveAllExcept(keep []common.Address) error { ...@@ -31,7 +31,7 @@ func (d *diskManager) RemoveAllExcept(keep []common.Address) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to list directory: %w", err) return fmt.Errorf("failed to list directory: %w", err)
} }
var result error var errs []error
for _, entry := range entries { for _, entry := range entries {
if !entry.IsDir() || !strings.HasPrefix(entry.Name(), gameDirPrefix) { if !entry.IsDir() || !strings.HasPrefix(entry.Name(), gameDirPrefix) {
// Skip files and directories that don't have the game directory prefix. // Skip files and directories that don't have the game directory prefix.
...@@ -42,16 +42,14 @@ func (d *diskManager) RemoveAllExcept(keep []common.Address) error { ...@@ -42,16 +42,14 @@ func (d *diskManager) RemoveAllExcept(keep []common.Address) error {
name := entry.Name()[len(gameDirPrefix):] name := entry.Name()[len(gameDirPrefix):]
addr := common.HexToAddress(name) addr := common.HexToAddress(name)
if addr == (common.Address{}) { if addr == (common.Address{}) {
// Couldn't parse the directory name to an address so mustn't be a game directory // Ignore directories with non-address names.
continue continue
} }
if slices.Contains(keep, addr) { if slices.Contains(keep, addr) {
// We need to preserve this data // Preserve data for games we should keep.
continue continue
} }
if err := os.RemoveAll(filepath.Join(d.datadir, entry.Name())); err != nil { errs = append(errs, os.RemoveAll(filepath.Join(d.datadir, entry.Name())))
result = multierror.Append(result, err)
}
} }
return result return errors.Join(errs...)
} }
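This hunk replaces hashicorp/go-multierror with the standard library's errors.Join (Go 1.20+). errors.Join discards nil values and returns nil when every element is nil, which is why the result of each os.RemoveAll can be appended unconditionally. A tiny sketch of that behaviour:

package main

import (
	"errors"
	"fmt"
)

func main() {
	// Sketch only: nil entries are discarded, so successful removals contribute nothing.
	var errs []error
	errs = append(errs, nil)          // a successful os.RemoveAll would contribute nil
	fmt.Println(errors.Join(errs...)) // <nil>

	errs = append(errs, errors.New("remove failed"))
	fmt.Println(errors.Join(errs...)) // remove failed
}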
...@@ -13,7 +13,7 @@ importers: ...@@ -13,7 +13,7 @@ importers:
version: 2.26.0 version: 2.26.0
'@codechecks/client': '@codechecks/client':
specifier: ^0.1.11 specifier: ^0.1.11
version: 0.1.11(typescript@5.1.6) version: 0.1.12(typescript@5.1.6)
devDependencies: devDependencies:
'@babel/eslint-parser': '@babel/eslint-parser':
specifier: ^7.18.2 specifier: ^7.18.2
...@@ -97,8 +97,8 @@ importers: ...@@ -97,8 +97,8 @@ importers:
specifier: 14.0.1 specifier: 14.0.1
version: 14.0.1 version: 14.0.1
markdownlint: markdownlint:
specifier: ^0.24.0 specifier: ^0.30.0
version: 0.24.0 version: 0.30.0
markdownlint-cli2: markdownlint-cli2:
specifier: 0.4.0 specifier: 0.4.0
version: 0.4.0 version: 0.4.0
...@@ -1204,8 +1204,8 @@ packages: ...@@ -1204,8 +1204,8 @@ packages:
prettier: 2.8.8 prettier: 2.8.8
dev: false dev: false
/@codechecks/client@0.1.11(typescript@5.1.6): /@codechecks/client@0.1.12(typescript@5.1.6):
resolution: {integrity: sha512-dSIzHnGNcXxDZtnVQEXWQHXH2v9KrpnK4mDGDxdwSu3l00rOIVwJcttj0wzx0bC0Q6gs65VsQdZH4gkanLdXOA==} resolution: {integrity: sha512-2GHHvhO3kaOyxFXxOaiznlY8ARmz33/p+WQdhc2y6wzWw5eOl2wSwg1eZxx3LsWlAnB963Y4bd1YjZcGIhKRzA==}
engines: {node: '>=6'} engines: {node: '>=6'}
hasBin: true hasBin: true
dependencies: dependencies:
...@@ -1221,7 +1221,7 @@ packages: ...@@ -1221,7 +1221,7 @@ packages:
lodash: 4.17.21 lodash: 4.17.21
marked: 0.7.0 marked: 0.7.0
marked-terminal: 3.3.0(marked@0.7.0) marked-terminal: 3.3.0(marked@0.7.0)
mkdirp: 0.5.5 mkdirp: 0.5.6
ms: 2.1.3 ms: 2.1.3
promise: 8.1.0 promise: 8.1.0
request: 2.88.2 request: 2.88.2
...@@ -11037,6 +11037,12 @@ packages: ...@@ -11037,6 +11037,12 @@ packages:
uc.micro: 1.0.6 uc.micro: 1.0.6
dev: true dev: true
/linkify-it@4.0.1:
resolution: {integrity: sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==}
dependencies:
uc.micro: 1.0.6
dev: true
/lint-staged@14.0.1: /lint-staged@14.0.1:
resolution: {integrity: sha512-Mw0cL6HXnHN1ag0mN/Dg4g6sr8uf8sn98w2Oc1ECtFto9tvRF7nkXGJRbx8gPlHyoR0pLyBr2lQHbWwmUHe1Sw==} resolution: {integrity: sha512-Mw0cL6HXnHN1ag0mN/Dg4g6sr8uf8sn98w2Oc1ECtFto9tvRF7nkXGJRbx8gPlHyoR0pLyBr2lQHbWwmUHe1Sw==}
engines: {node: ^16.14.0 || >=18.0.0} engines: {node: ^16.14.0 || >=18.0.0}
...@@ -11367,8 +11373,8 @@ packages: ...@@ -11367,8 +11373,8 @@ packages:
resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==} resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==}
engines: {node: '>=8'} engines: {node: '>=8'}
/markdown-it@12.2.0: /markdown-it@12.3.2:
resolution: {integrity: sha512-Wjws+uCrVQRqOoJvze4HCqkKl1AsSh95iFAeQDwnyfxM09divCBSXlDR1uTvyUP3Grzpn4Ru8GeCxYPM8vkCQg==} resolution: {integrity: sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==}
hasBin: true hasBin: true
dependencies: dependencies:
argparse: 2.0.1 argparse: 2.0.1
...@@ -11378,13 +11384,13 @@ packages: ...@@ -11378,13 +11384,13 @@ packages:
uc.micro: 1.0.6 uc.micro: 1.0.6
dev: true dev: true
/markdown-it@12.3.2: /markdown-it@13.0.1:
resolution: {integrity: sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==} resolution: {integrity: sha512-lTlxriVoy2criHP0JKRhO2VDG9c2ypWCsT237eDiLqi09rmbKoUetyGHq2uOIRoRS//kfoJckS0eUzzkDR+k2Q==}
hasBin: true hasBin: true
dependencies: dependencies:
argparse: 2.0.1 argparse: 2.0.1
entities: 2.1.0 entities: 3.0.1
linkify-it: 3.0.3 linkify-it: 4.0.1
mdurl: 1.0.1 mdurl: 1.0.1
uc.micro: 1.0.6 uc.micro: 1.0.6
dev: true dev: true
...@@ -11417,15 +11423,13 @@ packages: ...@@ -11417,15 +11423,13 @@ packages:
yaml: 1.10.2 yaml: 1.10.2
dev: true dev: true
/markdownlint-rule-helpers@0.16.0: /markdownlint-micromark@0.1.7:
resolution: {integrity: sha512-oEacRUVeTJ5D5hW1UYd2qExYI0oELdYK72k1TKGvIeYJIbqQWAz476NAc7LNixSySUhcNl++d02DvX0ccDk9/w==} resolution: {integrity: sha512-BbRPTC72fl5vlSKv37v/xIENSRDYL/7X/XoFzZ740FGEbs9vZerLrIkFRY0rv7slQKxDczToYuMmqQFN61fi4Q==}
engines: {node: '>=16'}
dev: true dev: true
/markdownlint@0.24.0: /markdownlint-rule-helpers@0.16.0:
resolution: {integrity: sha512-OJIGsGFV/rC9irI5E1FMy6v9hdACSwaa+EN3224Y5KG8zj2EYzdHOw0pOJovIYmjNfEZ9BtxUY4P7uYHTSNnbQ==} resolution: {integrity: sha512-oEacRUVeTJ5D5hW1UYd2qExYI0oELdYK72k1TKGvIeYJIbqQWAz476NAc7LNixSySUhcNl++d02DvX0ccDk9/w==}
engines: {node: '>=10'}
dependencies:
markdown-it: 12.2.0
dev: true dev: true
/markdownlint@0.25.1: /markdownlint@0.25.1:
...@@ -11435,6 +11439,14 @@ packages: ...@@ -11435,6 +11439,14 @@ packages:
markdown-it: 12.3.2 markdown-it: 12.3.2
dev: true dev: true
/markdownlint@0.30.0:
resolution: {integrity: sha512-nInuFvI/rEzanAOArW5490Ez4EYpB5ODqVM0mcDYCPx9DKJWCQqCgejjiCvbSeE7sjbDscVtZmwr665qpF5xGA==}
engines: {node: '>=16'}
dependencies:
markdown-it: 13.0.1
markdownlint-micromark: 0.1.7
dev: true
/marked-terminal@3.3.0(marked@0.7.0): /marked-terminal@3.3.0(marked@0.7.0):
resolution: {integrity: sha512-+IUQJ5VlZoAFsM5MHNT7g3RHSkA3eETqhRCdXv4niUMAKHQ7lb1yvAcuGPmm4soxhmtX13u4Li6ZToXtvSEH+A==} resolution: {integrity: sha512-+IUQJ5VlZoAFsM5MHNT7g3RHSkA3eETqhRCdXv4niUMAKHQ7lb1yvAcuGPmm4soxhmtX13u4Li6ZToXtvSEH+A==}
peerDependencies: peerDependencies:
...@@ -11956,19 +11968,11 @@ packages: ...@@ -11956,19 +11968,11 @@ packages:
engines: {node: '>= 8.0.0'} engines: {node: '>= 8.0.0'}
dev: false dev: false
/mkdirp@0.5.5:
resolution: {integrity: sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==}
hasBin: true
dependencies:
minimist: 1.2.8
dev: false
/mkdirp@0.5.6: /mkdirp@0.5.6:
resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==} resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==}
hasBin: true hasBin: true
dependencies: dependencies:
minimist: 1.2.8 minimist: 1.2.8
dev: true
/mkdirp@1.0.4: /mkdirp@1.0.4:
resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
......