Commit 3f98cfec authored by vicotor's avatar vicotor

update genesis and sequence

parent 64cd64c6
...@@ -3,8 +3,7 @@ ...@@ -3,8 +3,7 @@
"signerProxy": "0x", "signerProxy": "0x",
"assets": { "assets": {
"usdc": { "usdc": {
"balance": "0x20000000000", "balance": "0x20000000000"
"frozen": "0x10"
} }
} }
} }
......
{ {
"AddressManager": "0x38C86ee2Ef59682F1239fC2e76c01DD22a8E765F", "AddressManager": "0x7373967117eF5E813a09683EB62634c2a90e737f",
"AnchorStateRegistry": "0x3AD9aEA9146305AA3fe100C24Ab5d113655402b5", "AnchorStateRegistry": "0xdA011bF9E63ECe6d36EacFad58afC507585094bE",
"AnchorStateRegistryProxy": "0xA1a27C9E3A42E6ae5Aa71AB86c2847f623d2d58f", "AnchorStateRegistryProxy": "0x4525c9f35e691548d9047D0BD8AE5D16A1f2F686",
"DelayedWETH": "0xf2ab45B561bfb98B93d8f60A264b94051e39CE99", "DelayedWETH": "0xF86d47635585535CF7fD85ADbB6DB4BEE0740eDA",
"DelayedWETHProxy": "0x02c2C68f58899fD50d656aD9653427eB2eF8fc4F", "DelayedWETHProxy": "0x29EE0323fbF06a4f73CB3cf7C6dB719f5fF6e8D2",
"DisputeGameFactory": "0x08370cF520A123C40b4F131EB2aF9CA126fB92F0", "DisputeGameFactory": "0x4a59D40708FB81B556c6d5560a22272Bd77bB963",
"DisputeGameFactoryProxy": "0xaa844f6Bd0c8DdD4b69FaB60454A99de32F617B1", "DisputeGameFactoryProxy": "0xdc0c5b67Be3C19Fd69A727827E43956f1c7390d4",
"L1CrossDomainMessenger": "0xD521a240E8E326739851d87E98e9cCE1299c384A", "L1CrossDomainMessenger": "0xC894FB8099d45d90019857950B3ABBc3d10485ee",
"L1CrossDomainMessengerProxy": "0x45e31d0219Ea35420561Ee878bc1e23824381886", "L1CrossDomainMessengerProxy": "0xcbc3A6637F4e4815A5ef8a03413E3AFAE6222cFF",
"L1ERC721Bridge": "0xFa69Aa42cf8e63F05d01E7080Bb197BFEf8D58Eb", "L1ERC721Bridge": "0xb7103A6a84f0404EcA4B65C8bcD7AE355EFA6e10",
"L1ERC721BridgeProxy": "0xA6679bEbBEF6Ce20D747eDd3FB07f7E64d6E42d5", "L1ERC721BridgeProxy": "0x2b2e346412Ceb4fA71023daa2CcAD0081C99778f",
"L1StandardBridge": "0xF72ec99912E6cbBF9affdD2159D339e2B402BD55", "L1StandardBridge": "0xF96646f42292980c715eE762627b089a575222ac",
"L1StandardBridgeProxy": "0x2B9c790A2AA3342aCc0ec474C8dB3Dee66bc65b3", "L1StandardBridgeProxy": "0xC32898646d933743D64C1B771BC11fFd2a84935D",
"L2OutputOracle": "0x25bBC605d5eDfbe28D270b1AEC9Fd80165117275", "L2OutputOracle": "0x02395833E261B27eD20278A7D1e2C3afFDd33205",
"L2OutputOracleProxy": "0xde73FE0b1400412704a8682214B3F94d909E6793", "L2OutputOracleProxy": "0xA7a7c56270D9540c5aD1F21a62F974B0bf981B1c",
"Mips": "0xF7F9bd0252b63683FA27d0D8bC8fb8DEac3752Fc", "Mips": "0x10dc18799178e75FdF63792d0Dc3a3108FEb4EE9",
"OptimismMintableERC20Factory": "0x72Ba20B8e2b828bDBfB9E4a97DcF24b24a370439", "OptimismMintableERC20Factory": "0x4a4CED14d27496d5445213f02360ab5d7a724638",
"OptimismMintableERC20FactoryProxy": "0x1fb6B85E2e5f5f85F99b1a598A4ba6A8D645dC64", "OptimismMintableERC20FactoryProxy": "0x6C1df6Da9f85316271eff4380d2Aa617505B8da8",
"OptimismPortal": "0xa9e1fE9a7a580Ef6B913d3353882422C1c77fc4F", "OptimismPortal": "0xD52C2CaFe8A33762639f7d118390Bd2d01CA9202",
"OptimismPortal2": "0xA1A34cB7A8373614e66a482855417e2a8013b6c9", "OptimismPortal2": "0x9C478Fd2e9818ac627E7a25731217FFDdCD355a7",
"OptimismPortalProxy": "0x6FCF4305D69926688B311DCa84D16c82f196C0a6", "OptimismPortalProxy": "0x4D9eaCc218bfC2ab7F1F5420A32A90790B7A98E5",
"PermissionedDelayedWETHProxy": "0xc7907fDE17D5dA89004bc0c122BD251Db304E749", "PermissionedDelayedWETHProxy": "0x803EF79f1ADaAd360779774d604e2dbdeC23330F",
"PreimageOracle": "0x19c9587B5f8d733f96Ca274834e64381DE2D1C83", "PreimageOracle": "0x1091e91f8546DFcFdf55782F42fbE655b1655C07",
"ProtocolVersions": "0x4640D6a7F62cae7AE5F3f28B17c085b6f2Cfd730", "ProtocolVersions": "0x43F00Ed9Ad50fD52c5606d97c4731986e998C910",
"ProtocolVersionsProxy": "0xd3168476874b3eC093B37251D0D4D50D1f3E3530", "ProtocolVersionsProxy": "0x005C4E78977BEF8418E71c01ed7dD4419e2665a9",
"ProxyAdmin": "0x7375B5c3B3b3Cb5A7643ceBa07987B500c09EecC", "ProxyAdmin": "0x96365FF4dA6FBBc13d92eA0D98615AC8B4AC9aE0",
"SafeProxyFactory": "0x66CB03A28d5E645f58bFDBcbCED35Fb1369918f0", "SafeProxyFactory": "0xEB673958Ac2341f90CFfd29803177aF0fdBB50f1",
"SafeSingleton": "0x55001679C3D561C4f5f4F203Ea61d5b0BcAD358A", "SafeSingleton": "0x99d50087B877Cd63Ba6bc1ea4C203c18754D58Fe",
"SuperchainConfig": "0x48Fe67C7adC998030d9b30bedaE8F992a9543620", "SuperchainConfig": "0xe22FBbE21E13335B6D3D64189A645d3e5a07cf5f",
"SuperchainConfigProxy": "0x4A7F1642c97dFa24C9E8FA1F814785BA98C32c48", "SuperchainConfigProxy": "0x532BFe9E2B7C7A253d5600c57DbBC08a646a32F6",
"SystemConfig": "0x0F3127dA04f2B791DBf02ED34FAF7E0b473707D5", "SystemConfig": "0x6ec9C99C7a5D96fD053cCd935134560e45A045fa",
"SystemConfigProxy": "0xd9FdB8291f8ea076Fb5735eBF699131a8C7bB723", "SystemConfigProxy": "0x29F55Df6CE21dB877191ac58256dB73B344c00D3",
"SystemOwnerSafe": "0xb1E9984ea197F4b570B2dC12A6A62f78EeD8C602" "SystemOwnerSafe": "0x77233e4EE8Fd6E78C66dfEF8614b9F0D0b13d871"
} }
\ No newline at end of file
{ {
"chainId": 42069, "chainId": 42069,
"timestamp": 1741313556, "timestamp": 1741598424,
"extraData": "0x", "extraData": "0x",
"accounts": { "accounts": {
"0x905D5E8F7db76bCA91fdcA0990be7263dfD23335": { "0x905D5E8F7db76bCA91fdcA0990be7263dfD23335": {
"assets": { "assets": {
"usdc": { "usdc": {
"balance": "0x20000000000", "balance": "0x20000000000"
"frozen": "0x10"
} }
} }
} }
......
{ {
"genesis": { "genesis": {
"l1": { "l1": {
"hash": "0x6422493f87d9d72d895b4813b782a8bb05f1c591d851b01ff82f809c82453247", "hash": "0x282c868315a1af2ab68e35120ad69245615607dd7fcab125c1ca492d9c3d525c",
"number": 88365 "number": 110497
}, },
"l2": { "l2": {
"hash": "0xee054561bf9bc7a73ac3c71c1df681284c55e42f9527414d0494573c9ed504e6", "hash": "0xc636faf0b7d86242a3dc5cbd2742561b632d78c7a9abaea9129e47bb69e88280",
"number": 0 "number": 0
}, },
"l2_time": 1741313556, "l2_time": 1741598424,
"system_config": { "system_config": {
"batcherAddr": "0x74fb49fb24700c896b6e68af0db872ac0cd97c0c", "batcherAddr": "0x74fb49fb24700c896b6e68af0db872ac0cd97c0c",
"overhead": "0x0000000000000000000000000000000000000000000000000000000000000000", "overhead": "0x0000000000000000000000000000000000000000000000000000000000000000",
...@@ -25,8 +25,8 @@ ...@@ -25,8 +25,8 @@
"canyon_time": 0, "canyon_time": 0,
"delta_time": 0, "delta_time": 0,
"batch_inbox_address": "0xff00000000000000000000000000000000042069", "batch_inbox_address": "0xff00000000000000000000000000000000042069",
"deposit_contract_address": "0x6fcf4305d69926688b311dca84d16c82f196c0a6", "deposit_contract_address": "0x4d9eacc218bfc2ab7f1f5420a32a90790b7a98e5",
"l1_system_config_address": "0xd9fdb8291f8ea076fb5735ebf699131a8c7bb723", "l1_system_config_address": "0x29f55df6ce21db877191ac58256db73b344c00d3",
"protocol_versions_address": "0x0000000000000000000000000000000000000000" "protocol_versions_address": "0x0000000000000000000000000000000000000000"
} }
...@@ -10,7 +10,6 @@ import ( ...@@ -10,7 +10,6 @@ import (
func (g GenesisAccount) MarshalJSON() ([]byte, error) { func (g GenesisAccount) MarshalJSON() ([]byte, error) {
type InnerWalletInfo struct { type InnerWalletInfo struct {
Balance *hexutil.Big `json:"balance"` Balance *hexutil.Big `json:"balance"`
Frozen *hexutil.Big `json:"frozen"`
} }
type GenesisAccount struct { type GenesisAccount struct {
SignerProxy *hexutil.Bytes `json:"signerProxy,omitempty"` SignerProxy *hexutil.Bytes `json:"signerProxy,omitempty"`
...@@ -28,11 +27,6 @@ func (g GenesisAccount) MarshalJSON() ([]byte, error) { ...@@ -28,11 +27,6 @@ func (g GenesisAccount) MarshalJSON() ([]byte, error) {
wallet.Balance = &balance wallet.Balance = &balance
} }
if v.Frozen != nil {
frozen := hexutil.Big(*v.Frozen)
wallet.Frozen = &frozen
}
enc.Assets[k] = wallet enc.Assets[k] = wallet
} }
return json.Marshal(&enc) return json.Marshal(&enc)
...@@ -42,7 +36,6 @@ func (g GenesisAccount) MarshalJSON() ([]byte, error) { ...@@ -42,7 +36,6 @@ func (g GenesisAccount) MarshalJSON() ([]byte, error) {
func (g *GenesisAccount) UnmarshalJSON(input []byte) error { func (g *GenesisAccount) UnmarshalJSON(input []byte) error {
type InnerWalletInfo struct { type InnerWalletInfo struct {
Balance *hexutil.Big `json:"balance"` Balance *hexutil.Big `json:"balance"`
Frozen *hexutil.Big `json:"frozen"`
} }
type GenesisAccount struct { type GenesisAccount struct {
SignerProxy *hexutil.Bytes `json:"signerProxy,omitempty"` SignerProxy *hexutil.Bytes `json:"signerProxy,omitempty"`
...@@ -63,9 +56,6 @@ func (g *GenesisAccount) UnmarshalJSON(input []byte) error { ...@@ -63,9 +56,6 @@ func (g *GenesisAccount) UnmarshalJSON(input []byte) error {
if v.Balance != nil { if v.Balance != nil {
wallet.Balance = new(big.Int).Set(v.Balance.ToInt()) wallet.Balance = new(big.Int).Set(v.Balance.ToInt())
} }
if v.Frozen != nil {
wallet.Frozen = new(big.Int).Set(v.Frozen.ToInt())
}
g.Assets[coin] = wallet g.Assets[coin] = wallet
} }
} }
......
package genesis package genesis
import ( import (
"bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/crypto"
"github.com/exchain/go-exchain/exchain"
"github.com/exchain/go-exchain/exchain/chaindb" "github.com/exchain/go-exchain/exchain/chaindb"
nebulav1 "github.com/exchain/go-exchain/exchain/protocol/gen/go/nebula/v1" nebulav1 "github.com/exchain/go-exchain/exchain/protocol/gen/go/nebula/v1"
"github.com/exchain/go-exchain/metadb"
"github.com/golang/protobuf/proto" "github.com/golang/protobuf/proto"
"github.com/holiman/uint256" "github.com/holiman/uint256"
"math/big" "math/big"
"os" "os"
"sort"
) )
var ( var (
...@@ -21,7 +23,6 @@ var ( ...@@ -21,7 +23,6 @@ var (
type WalletInfo struct { type WalletInfo struct {
Balance *big.Int `json:"balance"` Balance *big.Int `json:"balance"`
Frozen *big.Int `json:"frozen"`
} }
type AssetsInfo map[string]WalletInfo type AssetsInfo map[string]WalletInfo
...@@ -41,6 +42,21 @@ type GenesisBlock struct { ...@@ -41,6 +42,21 @@ type GenesisBlock struct {
// GenesisAlloc specifies the initial state that is part of the genesis block. // GenesisAlloc specifies the initial state that is part of the genesis block.
type GenesisAlloc map[common.Address]GenesisAccount type GenesisAlloc map[common.Address]GenesisAccount
type SortedAddress []common.Address
func (a SortedAddress) Len() int { return len(a) }
func (a SortedAddress) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a SortedAddress) Less(i, j int) bool { return bytes.Compare(a[i].Bytes(), a[j].Bytes()) < 0 }
func sortGenesisAllocs(allocs GenesisAlloc) []common.Address {
var keys []common.Address
for k := range allocs {
keys = append(keys, k)
}
sort.Sort(SortedAddress(keys))
return keys
}
func (ga *GenesisAlloc) UnmarshalJSON(data []byte) error { func (ga *GenesisAlloc) UnmarshalJSON(data []byte) error {
m := make(map[common.UnprefixedAddress]GenesisAccount) m := make(map[common.UnprefixedAddress]GenesisAccount)
if err := json.Unmarshal(data, &m); err != nil { if err := json.Unmarshal(data, &m); err != nil {
...@@ -53,13 +69,19 @@ func (ga *GenesisAlloc) UnmarshalJSON(data []byte) error { ...@@ -53,13 +69,19 @@ func (ga *GenesisAlloc) UnmarshalJSON(data []byte) error {
return nil return nil
} }
func (g *GenesisBlock) Commit(db metadb.Database) (err error) { func (g *GenesisBlock) Commit(engine exchain.Engine, chain chaindb.ChainDB) (err error) {
chain := chaindb.NewChainDB(db)
blk := g.ToBlock() blk := g.ToBlock()
result, err := engine.ProcessPayload(blk)
if err != nil {
return err
}
if err := chain.SaveChainId(uint256.NewInt(g.ChainId)); err != nil { if err := chain.SaveChainId(uint256.NewInt(g.ChainId)); err != nil {
return err return err
} }
return chain.SaveBlockData(blk, nil) if err := chain.SaveBlockData(blk, result.Receipts); err != nil {
return err
}
return nil
} }
func (g *GenesisBlock) ToBlock() *nebulav1.Block { func (g *GenesisBlock) ToBlock() *nebulav1.Block {
...@@ -74,8 +96,50 @@ func (g *GenesisBlock) ToBlock() *nebulav1.Block { ...@@ -74,8 +96,50 @@ func (g *GenesisBlock) ToBlock() *nebulav1.Block {
L1Height: 0, L1Height: 0,
SequenceNumber: 0, SequenceNumber: 0,
}, },
Transactions: &nebulav1.TransactionList{}, Transactions: &nebulav1.TransactionList{
Txs: make([]*nebulav1.Transaction, 0),
},
} }
accounts := sortGenesisAllocs(g.AllocInfo)
for _, account := range accounts {
info := g.AllocInfo[account]
if len(info.SingerProxy) > 0 {
tx := &nebulav1.Transaction{
TxType: nebulav1.TxType_SignProxyTx,
User: account.String(),
Nonce: exchain.GetNonce().Bytes(),
Proxy: false,
Tx: &nebulav1.Transaction_SignProxyTx{
SignProxyTx: &nebulav1.SignProxyTransaction{
SignerProxy: info.SingerProxy,
},
},
Signature: nil,
}
blk.Transactions.Txs = append(blk.Transactions.Txs, tx)
}
for asset, wallet := range info.Assets {
if wallet.Balance.Cmp(big.NewInt(0)) > 0 {
tx := &nebulav1.Transaction{
TxType: nebulav1.TxType_DepositTx,
User: account.String(),
Nonce: exchain.GetNonce().Bytes(),
Proxy: false,
Tx: &nebulav1.Transaction_DepositTx{
DepositTx: &nebulav1.DepositTransaction{
SourceHash: common.Hash{}.Bytes(),
User: account.Bytes(),
Coin: []byte(asset),
Amount: wallet.Balance.Bytes(),
},
},
Signature: nil,
}
blk.Transactions.Txs = append(blk.Transactions.Txs, tx)
}
}
}
hash := BlockHash(blk) hash := BlockHash(blk)
signature, _ := crypto.Sign(hash.Bytes(), genesisSignerKey) signature, _ := crypto.Sign(hash.Bytes(), genesisSignerKey)
blk.Header.Signature = signature blk.Header.Signature = signature
......
...@@ -49,6 +49,8 @@ func (m MockEngine) NewPayload(params exchain.PayloadParams) (exchain.ExecutionR ...@@ -49,6 +49,8 @@ func (m MockEngine) NewPayload(params exchain.PayloadParams) (exchain.ExecutionR
} }
func (m MockEngine) ProcessPayload(block *nebulav1.Block) (exchain.ExecutionResult, error) { func (m MockEngine) ProcessPayload(block *nebulav1.Block) (exchain.ExecutionResult, error) {
genesis := block.Header.Height == 0
if !genesis {
parent, err := m.chain.GetBlockByLabel(chaindb.ExChainBlockLatest) parent, err := m.chain.GetBlockByLabel(chaindb.ExChainBlockLatest)
if err != nil { if err != nil {
return exchain.ExecutionResult{}, err return exchain.ExecutionResult{}, err
...@@ -56,6 +58,7 @@ func (m MockEngine) ProcessPayload(block *nebulav1.Block) (exchain.ExecutionResu ...@@ -56,6 +58,7 @@ func (m MockEngine) ProcessPayload(block *nebulav1.Block) (exchain.ExecutionResu
if parent.Header.Height+1 != block.Header.Height { if parent.Header.Height+1 != block.Header.Height {
return exchain.ExecutionResult{}, fmt.Errorf("invalid block height") return exchain.ExecutionResult{}, fmt.Errorf("invalid block height")
} }
}
receipts, err := m.ProcessTx(block.Header, block.Transactions) receipts, err := m.ProcessTx(block.Header, block.Transactions)
if err != nil { if err != nil {
return exchain.ExecutionResult{}, err return exchain.ExecutionResult{}, err
......
...@@ -3,7 +3,9 @@ package genesis ...@@ -3,7 +3,9 @@ package genesis
import ( import (
"errors" "errors"
"fmt" "fmt"
"github.com/exchain/go-exchain/exchain/chaindb"
chaingenesis "github.com/exchain/go-exchain/exchain/genesis" chaingenesis "github.com/exchain/go-exchain/exchain/genesis"
"github.com/exchain/go-exchain/exchain/mockengine"
"github.com/exchain/go-exchain/exchain/wrapper" "github.com/exchain/go-exchain/exchain/wrapper"
"github.com/exchain/go-exchain/metadb/groupdb" "github.com/exchain/go-exchain/metadb/groupdb"
"time" "time"
...@@ -248,7 +250,9 @@ var Subcommands = cli.Commands{ ...@@ -248,7 +250,9 @@ var Subcommands = cli.Commands{
logger.Error("Failed to create genesis block", "err", err) logger.Error("Failed to create genesis block", "err", err)
return err return err
} }
if err = genblk.Commit(database); err != nil { chain := chaindb.NewChainDB(database)
engine := mockengine.NewEngine(chain)
if err = genblk.Commit(engine, chain); err != nil {
logger.Error("Failed to commit genesis block", "err", err) logger.Error("Failed to commit genesis block", "err", err)
} }
// wait db write to disk. // wait db write to disk.
......
...@@ -51,8 +51,8 @@ func (los *L1OriginSelector) OnEvent(ev event.Event) bool { ...@@ -51,8 +51,8 @@ func (los *L1OriginSelector) OnEvent(ev event.Event) bool {
switch x := ev.(type) { switch x := ev.(type) {
case engine.ForkchoiceUpdateEvent: case engine.ForkchoiceUpdateEvent:
los.onForkchoiceUpdate(x.UnsafeL2Head) los.onForkchoiceUpdate(x.UnsafeL2Head)
case rollup.ResetEvent: //case rollup.ResetEvent:
los.reset() // los.reset()
default: default:
return false return false
} }
...@@ -79,7 +79,8 @@ func (los *L1OriginSelector) FindL1Origin(ctx context.Context, l2Head eth.L2Bloc ...@@ -79,7 +79,8 @@ func (los *L1OriginSelector) FindL1Origin(ctx context.Context, l2Head eth.L2Bloc
msd := los.spec.MaxSequencerDrift(currentOrigin.Time) msd := los.spec.MaxSequencerDrift(currentOrigin.Time)
log := los.log.New("current", currentOrigin, "current_time", currentOrigin.Time, log := los.log.New("current", currentOrigin, "current_time", currentOrigin.Time,
"l2_head", l2Head, "l2_head_time", l2Head.Time, "max_seq_drift", msd) "l2_head", l2Head, "l2_head_time", l2Head.Time, "max_seq_drift", msd, "next_origin", nextOrigin)
log.Debug("print info")
pastSeqDrift := l2Head.Time+los.cfg.BlockTime-currentOrigin.Time > msd pastSeqDrift := l2Head.Time+los.cfg.BlockTime-currentOrigin.Time > msd
...@@ -100,6 +101,7 @@ func (los *L1OriginSelector) FindL1Origin(ctx context.Context, l2Head eth.L2Bloc ...@@ -100,6 +101,7 @@ func (los *L1OriginSelector) FindL1Origin(ctx context.Context, l2Head eth.L2Bloc
if err != nil { if err != nil {
return eth.L1BlockRef{}, fmt.Errorf("cannot build next L2 block past current L1 origin %s by more than sequencer time drift, and failed to find next L1 origin: %w", currentOrigin, err) return eth.L1BlockRef{}, fmt.Errorf("cannot build next L2 block past current L1 origin %s by more than sequencer time drift, and failed to find next L1 origin: %w", currentOrigin, err)
} }
log.Debug("fetch next origin", "next_origin", nextOrigin)
} }
// If the next origin is ahead of the L2 head, we must return the current origin. // If the next origin is ahead of the L2 head, we must return the current origin.
...@@ -117,6 +119,7 @@ func (los *L1OriginSelector) CurrentAndNextOrigin(ctx context.Context, l2Head et ...@@ -117,6 +119,7 @@ func (los *L1OriginSelector) CurrentAndNextOrigin(ctx context.Context, l2Head et
if l2Head.L1Origin == los.currentOrigin.ID() { if l2Head.L1Origin == los.currentOrigin.ID() {
// Most likely outcome: the L2 head is still on the current origin. // Most likely outcome: the L2 head is still on the current origin.
} else if l2Head.L1Origin == los.nextOrigin.ID() { } else if l2Head.L1Origin == los.nextOrigin.ID() {
los.log.Info("CurrentAndNextOrigin", "l2Head.Number", l2Head.Number, "l2Head.L1Origin", l2Head.L1Origin, "los.nextOrigin", los.nextOrigin)
// If the L2 head has progressed to the next origin, update the current and next origins. // If the L2 head has progressed to the next origin, update the current and next origins.
los.currentOrigin = los.nextOrigin los.currentOrigin = los.nextOrigin
los.nextOrigin = eth.L1BlockRef{} los.nextOrigin = eth.L1BlockRef{}
...@@ -151,7 +154,7 @@ func (los *L1OriginSelector) maybeSetNextOrigin(nextOrigin eth.L1BlockRef) { ...@@ -151,7 +154,7 @@ func (los *L1OriginSelector) maybeSetNextOrigin(nextOrigin eth.L1BlockRef) {
func (los *L1OriginSelector) onForkchoiceUpdate(unsafeL2Head eth.L2BlockRef) { func (los *L1OriginSelector) onForkchoiceUpdate(unsafeL2Head eth.L2BlockRef) {
// Only allow a relatively small window for fetching the next origin, as this is performed // Only allow a relatively small window for fetching the next origin, as this is performed
// on a best-effort basis. // on a best-effort basis.
ctx, cancel := context.WithTimeout(los.ctx, 2*time.Second) ctx, cancel := context.WithTimeout(los.ctx, 1*time.Second)
defer cancel() defer cancel()
currentOrigin, nextOrigin, err := los.CurrentAndNextOrigin(ctx, unsafeL2Head) currentOrigin, nextOrigin, err := los.CurrentAndNextOrigin(ctx, unsafeL2Head)
......
File deleted
# `ctb-go-ffi`
A lightweight binary for utilities accessed via `forge`'s `ffi` cheatcode in the `contracts-bedrock` test suite.
<pre>
├── go-ffi
│ ├── <a href="./bin.go">bin</a>: `go-ffi`'s binary
│ ├── <a href="./trie.go">trie</a>: Utility for generating random merkle trie roots / inclusion proofs
│ └── <a href="./differential-testing.go">diff-testing</a>: Utility for differential testing Solidity implementations against their respective Go implementations.
</pre>
## Usage
To build, run `just build-go-ffi` from this directory or the `contracts-bedrock` package.
### In a Forge Test
To use `go-ffi` in a forge test, simply invoke the binary using the solidity `Process` library's `run` method.
```solidity
function myFFITest() public {
string[] memory commands = new string[](3);
commands[0] = "./scripts/go-ffi/go-ffi";
commands[1] = "trie";
commands[2] = "valid";
bytes memory result = Process.run(commands);
// Do something with the result of the command
}
```
### Available Modes
There are two modes available in `go-ffi`: `diff` and `trie`. Each are present as a subcommand to the `go-ffi` binary, with their own set of variants.
#### `diff`
> **Note**
> Variant required for diff mode.
| Variant | Description |
| ------------------------------------- | -------------------------------------------------------------------------------------------------------------------- |
| `decodeVersionedNonce` | Decodes a versioned nonce and prints the decoded arguments |
| `encodeCrossDomainMessage` | Encodes a cross domain message and prints the encoded message |
| `hashCrossDomainMessage` | Encodes and hashes a cross domain message and prints the digest |
| `hashDepositTransaction` | Encodes and hashes a deposit transaction and prints the digest |
| `encodeDepositTransaction` | RLP encodes a deposit transaction |
| `hashWithdrawal` | Hashes a withdrawal message and prints the digest |
| `hashOutputRootProof` | Hashes an output root proof and prints the digest |
| `getProveWithdrawalTransactionInputs` | Generates the inputs for a `getProveWithdrawalTransaction` call to the `OptimismPortal` given a withdrawal message |
| `cannonMemoryProof` | Computes a merkle proof of Cannon's memory |
#### `trie`
> **Note**
> Variant required for `trie` mode.
| Variant | Description |
| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- |
| `valid` | Generate a test case with a valid proof of inclusion for the k/v pair in the trie. |
| `extra_proof_elems` | Generate an invalid test case with an extra proof element attached to an otherwise valid proof of inclusion for the passed k/v. |
| `corrupted_proof` | Generate an invalid test case where the proof is malformed. |
| `invalid_data_remainder` | Generate an invalid test case where a random element of the proof has more bytes than the length designates within the RLP list encoding. |
| `invalid_large_internal_hash` | Generate an invalid test case where a long proof element is incorrect for the root. |
| `invalid_internal_node_hash` | Generate an invalid test case where a small proof element is incorrect for the root. |
| `prefixed_valid_key` | Generate a valid test case with a key that has been given a random prefix |
| `empty_key` | Generate a valid test case with a proof of inclusion for an empty key. |
| `partial_proof` | Generate an invalid test case with a partially correct proof |
package main
import (
"log"
"os"
)
// main dispatches to one of the go-ffi utility subcommands based on the
// first CLI argument: "diff", "trie", or "merkle".
func main() {
	// Guard before indexing: with no argument, os.Args[1] would panic with an
	// index-out-of-range error instead of printing the helpful message below.
	if len(os.Args) < 2 {
		log.Fatal("Must pass a subcommand")
	}
	switch os.Args[1] {
	case "diff":
		DiffTestUtils()
	case "trie":
		FuzzTrie()
	case "merkle":
		DiffMerkle()
	default:
		log.Fatal("Must pass a subcommand")
	}
}
package main
import (
"fmt"
"log"
"os"
"strconv"
"github.com/ethereum-optimism/optimism/op-challenger/game/keccak/merkle"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/crypto"
)
// VerifyMerkleProof verifies a merkle proof against the root hash and the leaf hash.
// Reference: https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/beacon-chain.md#is_valid_merkle_branch
func VerifyMerkleProof(root, leaf common.Hash, index uint64, proof [merkle.BinaryMerkleTreeDepth]common.Hash) bool {
	// Fold the leaf up the tree, hashing in the sibling at each level. The
	// bit of `index` at each depth tells us whether we are the right (1) or
	// left (0) child at that level.
	acc := leaf
	for depth := 0; depth < merkle.BinaryMerkleTreeDepth; depth++ {
		sibling := proof[depth]
		if (index>>depth)&1 == 1 {
			acc = crypto.Keccak256Hash(sibling[:], acc[:])
		} else {
			acc = crypto.Keccak256Hash(acc[:], sibling[:])
		}
	}
	// The proof is valid iff the accumulated hash equals the claimed root.
	return acc == root
}
const (
	// GenProof generates a merkle proof for a given leaf index by reconstructing the merkle tree from the passed
	// leaves.
	genProof = "gen_proof"
)

var (
	// ABI tuple type describing the (root, proof) pair returned to forge.
	rootAndProof, _ = abi.NewType("tuple", "", []abi.ArgumentMarshaling{
		{Name: "root", Type: "bytes32"},
		{Name: "proof", Type: "bytes32[]"},
	})

	// merkleEncoder packs the rootAndProof tuple for FFI output.
	merkleEncoder = abi.Arguments{
		{Type: rootAndProof},
	}
)
// DiffMerkle generates an abi-encoded `merkleTestCase` of a specified variant
// and prints it (hex-encoded, no trailing newline) for foundry's FFI.
func DiffMerkle() {
	// Guard before indexing: the previous `os.Args[2]` read would panic with an
	// index-out-of-range error when no variant was passed, instead of printing
	// the helpful message below. An empty-string variant falls through to the
	// default case and fails with the invalid-variant message.
	if len(os.Args) < 3 {
		log.Fatal("Must pass a variant to the merkle diff tester!")
	}
	variant := os.Args[2]

	switch variant {
	case genProof:
		if len(os.Args) < 5 {
			log.Fatal("Invalid arguments to `gen_proof` variant.")
		}
		rawLeaves, err := hexutil.Decode(os.Args[3])
		if err != nil {
			log.Fatal("Failed to decode leaves: ", err)
		}
		index, err := strconv.ParseInt(os.Args[4], 10, 64)
		if err != nil {
			log.Fatal("Failed to parse leaf index: ", err)
		}

		merkleTree := merkle.NewBinaryMerkleTree()

		// Append all leaves (32 bytes each) to the merkle tree.
		for i := 0; i < len(rawLeaves)/32; i++ {
			leaf := common.BytesToHash(rawLeaves[i<<5 : (i+1)<<5])
			merkleTree.AddLeaf(leaf)
		}

		// Generate the proof for the given index.
		proof := merkleTree.ProofAtIndex(uint64(index))

		// Generate the merkle root.
		root := merkleTree.RootHash()

		// Return "abi.encode(root, proof)"; the leading 32-byte tuple offset
		// word is stripped before hex-encoding.
		packed, err := merkleEncoder.Pack(struct {
			Root  common.Hash
			Proof [merkle.BinaryMerkleTreeDepth]common.Hash
		}{
			Root:  root,
			Proof: proof,
		})
		if err != nil {
			log.Fatal("Failed to ABI encode root and proof: ", err)
		}
		fmt.Print(hexutil.Encode(packed[32:]))
	default:
		log.Fatal("Invalid variant passed to merkle diff tester!")
	}
}
package main
import (
"crypto/rand"
"fmt"
"log"
"math/big"
"os"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/core/rawdb"
"github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/trie"
"github.com/ethereum/go-ethereum/triedb"
)
// Variant enum
//
// Each constant names a `trie` subcommand variant selecting how (or whether)
// the generated proof of inclusion is corrupted before being returned to the
// Solidity `MerkleTrie` tests.
const (
	// Generate a test case with a valid proof of inclusion for the k/v pair in the trie.
	valid string = "valid"
	// Generate an invalid test case with an extra proof element attached to an otherwise
	// valid proof of inclusion for the passed k/v.
	extraProofElems = "extra_proof_elems"
	// Generate an invalid test case where the proof is malformed.
	corruptedProof = "corrupted_proof"
	// Generate an invalid test case where a random element of the proof has more bytes than the
	// length designates within the RLP list encoding.
	invalidDataRemainder = "invalid_data_remainder"
	// Generate an invalid test case where a long proof element is incorrect for the root.
	invalidLargeInternalHash = "invalid_large_internal_hash"
	// Generate an invalid test case where a small proof element is incorrect for the root.
	invalidInternalNodeHash = "invalid_internal_node_hash"
	// Generate a valid test case with a key that has been given a random prefix
	prefixedValidKey = "prefixed_valid_key"
	// Generate a valid test case with a proof of inclusion for an empty key.
	emptyKey = "empty_key"
	// Generate an invalid test case with a partially correct proof
	partialProof = "partial_proof"
)
// FuzzTrie generates an abi-encoded `trieTestCase` of a specified variant and
// prints it (hex-encoded, no trailing newline) for foundry's FFI.
func FuzzTrie() {
	// Guard before indexing: the previous `os.Args[2]` read would panic with an
	// index-out-of-range error when no variant was passed, instead of printing
	// the helpful message below.
	if len(os.Args) < 3 {
		log.Fatal("Must pass a variant to the trie fuzzer!")
	}
	variant := os.Args[2]

	var testCase trieTestCase
	switch variant {
	case valid:
		testCase = genTrieTestCase(false)
	case extraProofElems:
		testCase = genTrieTestCase(false)
		// Duplicate the last element of the proof
		testCase.Proof = append(testCase.Proof, testCase.Proof[len(testCase.Proof)-1])
	case corruptedProof:
		testCase = genTrieTestCase(false)
		// Re-encode a random element within the proof
		idx := randRange(0, int64(len(testCase.Proof)))
		encoded, _ := rlp.EncodeToBytes(testCase.Proof[idx])
		testCase.Proof[idx] = encoded
	case invalidDataRemainder:
		testCase = genTrieTestCase(false)
		// Alter true length of random proof element by appending random bytes
		// Do not update the encoded length
		idx := randRange(0, int64(len(testCase.Proof)))
		b := make([]byte, randRange(1, 512))
		if _, err := rand.Read(b); err != nil {
			log.Fatal("Error generating random bytes")
		}
		testCase.Proof[idx] = append(testCase.Proof[idx], b...)
	case invalidLargeInternalHash:
		testCase = genTrieTestCase(false)
		// Clobber 4 bytes within a list element of a random proof element
		// TODO: Improve this by decoding the proof elem and choosing random
		// bytes to overwrite.
		idx := randRange(1, int64(len(testCase.Proof)))
		b := make([]byte, 4)
		if _, err := rand.Read(b); err != nil {
			log.Fatal("Error generating random bytes")
		}
		testCase.Proof[idx] = append(
			testCase.Proof[idx][:20],
			append(
				b,
				testCase.Proof[idx][24:]...,
			)...,
		)
	case invalidInternalNodeHash:
		testCase = genTrieTestCase(false)
		// Assign the last proof element to an encoded list containing a
		// random 29 byte value
		b := make([]byte, 29)
		if _, err := rand.Read(b); err != nil {
			log.Fatal("Error generating random bytes")
		}
		e, _ := rlp.EncodeToBytes(b)
		testCase.Proof[len(testCase.Proof)-1] = append([]byte{0xc0 + 30}, e...)
	case prefixedValidKey:
		testCase = genTrieTestCase(false)
		b := make([]byte, randRange(1, 16))
		if _, err := rand.Read(b); err != nil {
			log.Fatal("Error generating random bytes")
		}
		testCase.Key = append(b, testCase.Key...)
	case emptyKey:
		testCase = genTrieTestCase(true)
	case partialProof:
		testCase = genTrieTestCase(false)
		// Cut the proof in half (copy instead of a manual element loop)
		newProof := make([][]byte, len(testCase.Proof)/2)
		copy(newProof, testCase.Proof)
		testCase.Proof = newProof
	default:
		log.Fatal("Invalid variant passed to trie fuzzer!")
	}

	// Print encoded test case with no newline so that foundry's FFI can read the output
	fmt.Print(testCase.AbiEncode())
}
// Generate a random test case for Bedrock's MerkleTrie verifier.
//
// Builds an in-memory trie containing a random number of random k/v pairs,
// selects one pair, and returns it along with the trie root and a proof of
// the selected key's inclusion. When `selectEmptyKey` is true, the selected
// pair uses the empty key.
func genTrieTestCase(selectEmptyKey bool) trieTestCase {
	// Create an empty merkle trie
	memdb := rawdb.NewMemoryDatabase()
	randTrie := trie.NewEmpty(triedb.NewDatabase(memdb, nil))

	// Get a random number of elements to put into the trie
	randN := randRange(2, 1024)
	// Get a random key/value pair to generate a proof of inclusion for
	randSelect := randRange(0, randN)

	// Create a fixed-length key as well as a randomly-sized value
	// We create these out of the loop to reduce mem allocations.
	// NOTE(review): both buffers are reused across every iteration; `key` and
	// `value` below end up aliasing them. Confirm that trie.Update copies its
	// inputs — if it retains references, every inserted entry aliases the
	// final buffer contents.
	randKey := make([]byte, 32)
	randValue := make([]byte, randRange(2, 1024))

	// Randomly selected key/value for proof generation
	var key []byte
	var value []byte

	// Add `randN` elements to the trie
	for i := int64(0); i < randN; i++ {
		// Randomize the contents of `randKey` and `randValue`
		if _, err := rand.Read(randKey); err != nil {
			log.Fatal("Error generating random bytes")
		}
		if _, err := rand.Read(randValue); err != nil {
			log.Fatal("Error generating random bytes")
		}

		// Clear the selected key if `selectEmptyKey` is true
		// NOTE(review): once cleared, rand.Read fills zero bytes into the
		// empty slice on every later iteration, so all subsequent inserts
		// also use the empty key (repeatedly overwriting the same entry) —
		// verify this is intended for the `empty_key` variant.
		if i == randSelect && selectEmptyKey {
			randKey = make([]byte, 0)
		}

		// Insert the random k/v pair into the trie
		if err := randTrie.Update(randKey, randValue); err != nil {
			log.Fatal("Error adding key-value pair to trie")
		}

		// If this is our randomly selected k/v pair, store it in `key` & `value`
		if i == randSelect {
			key = randKey
			value = randValue
		}
	}

	// Generate proof for `key`'s inclusion in our trie
	var proof proofList
	if err := randTrie.Prove(key, &proof); err != nil {
		log.Fatal("Error creating proof for randomly selected key's inclusion in generated trie")
	}

	// Create our test case with the data collected
	testCase := trieTestCase{
		Root:  randTrie.Hash(),
		Key:   key,
		Value: value,
		Proof: proof,
	}

	return testCase
}
// trieTestCase represents a test case for bedrock's `MerkleTrie.sol`.
// The fields correspond positionally to the components of the
// `trieTestCaseTuple` ABI type (root, key, value, proof) used to ship
// the case to the Solidity side.
type trieTestCase struct {
	Root  common.Hash // Merkle root of the generated trie
	Key   []byte      // key whose inclusion is being proven
	Value []byte      // value stored under Key
	Proof [][]byte    // Merkle proof nodes for Key's inclusion
}
// ABI tuple type mirroring `trieTestCase`, plus the argument encoder used to
// pack test cases for consumption over foundry's FFI.
var (
	// NOTE(review): the error from abi.NewType is deliberately discarded —
	// the inputs are compile-time constants, so a failure here would be a
	// programming error rather than a runtime condition.
	trieTestCaseTuple, _ = abi.NewType("tuple", "TrieTestCase", []abi.ArgumentMarshaling{
		{Name: "root", Type: "bytes32"},
		{Name: "key", Type: "bytes"},
		{Name: "value", Type: "bytes"},
		{Name: "proof", Type: "bytes[]"},
	})
	// encoder packs a single trieTestCaseTuple argument.
	encoder = abi.Arguments{
		{Type: trieTestCaseTuple},
	}
)
// AbiEncode encodes the trieTestCase as the `trieTestCaseTuple` and returns
// it as a 0x-prefixed hex string with the leading 32-byte tuple offset word
// stripped, ready for the Solidity consumer to abi.decode.
func (t *trieTestCase) AbiEncode() string {
	// Encode the contents of the struct as a tuple.
	// NOTE(review): `&t` passes a **trieTestCase to the packer; this appears
	// to be tolerated by geth's abi reflection — confirm before simplifying
	// to `t`.
	packed, err := encoder.Pack(&t)
	if err != nil {
		log.Fatalf("Error packing TrieTestCase: %v", err)
	}
	// Remove the pointer and encode the packed bytes as a hex string
	return hexutil.Encode(packed[32:])
}
// randRange returns a cryptographically secure random 64-bit integer in the
// half-open interval [min, max).
func randRange(min int64, max int64) int64 {
	lo := new(big.Int).SetInt64(min)
	hi := new(big.Int).SetInt64(max)
	// Draw uniformly from [0, max-min), then shift up by min.
	n, err := rand.Int(rand.Reader, new(big.Int).Sub(hi, lo))
	if err != nil {
		log.Fatal("Failed to generate random number within bounds")
	}
	return n.Add(n, lo).Int64()
}
package main
import (
"errors"
"fmt"
"math/big"
"github.com/ethereum-optimism/optimism/op-chain-ops/crossdomain"
"github.com/ethereum-optimism/optimism/op-node/bindings"
"github.com/ethereum-optimism/optimism/op-node/rollup"
"github.com/ethereum-optimism/optimism/op-node/rollup/derive"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
)
// UnknownNonceVersion is returned when a versioned nonce carries a version
// other than 0 or 1.
// NOTE(review): Go convention would name this ErrUnknownNonceVersion with a
// lowercase message, but renaming it would break external references.
var UnknownNonceVersion = errors.New("Unknown nonce version")
// checkOk panics when ok is false; shorthand used throughout the fuzz
// entrypoints to bail out on impossible states.
func checkOk(ok bool) {
	if ok {
		return
	}
	panic(fmt.Errorf("checkOk failed"))
}
// checkErr checks if err is not nil, and throws if so.
// Shorthand to ease go's god awful error handling
func checkErr(err error, failReason string) {
if err != nil {
panic(fmt.Errorf("%s: %w", failReason, err))
}
}
// encodeCrossDomainMessage encodes a versioned cross domain message into a
// byte array. The message version is extracted from the nonce; versions 0
// and 1 are supported, anything else yields UnknownNonceVersion.
func encodeCrossDomainMessage(nonce *big.Int, sender common.Address, target common.Address, value *big.Int, gasLimit *big.Int, data []byte) ([]byte, error) {
	_, version := crossdomain.DecodeVersionedNonce(nonce)
	switch {
	case version.Cmp(big.NewInt(0)) == 0:
		// Encode cross domain message V0
		return crossdomain.EncodeCrossDomainMessageV0(target, sender, data, nonce)
	case version.Cmp(big.NewInt(1)) == 0:
		// Encode cross domain message V1
		return crossdomain.EncodeCrossDomainMessageV1(nonce, sender, target, value, gasLimit, data)
	default:
		return nil, UnknownNonceVersion
	}
}
// hashWithdrawal hashes a withdrawal transaction by assembling it into a
// crossdomain.Withdrawal and delegating to its Hash implementation.
func hashWithdrawal(nonce *big.Int, sender common.Address, target common.Address, value *big.Int, gasLimit *big.Int, data []byte) (common.Hash, error) {
	// The Withdrawal struct wants pointers to the addresses; the parameters
	// are already local copies, so taking their addresses is safe.
	senderAddr := sender
	targetAddr := target
	withdrawal := crossdomain.Withdrawal{
		Nonce:    nonce,
		Sender:   &senderAddr,
		Target:   &targetAddr,
		Value:    value,
		GasLimit: gasLimit,
		Data:     data,
	}
	return withdrawal.Hash()
}
// hashOutputRootProof hashes an output root proof via
// rollup.ComputeL2OutputRoot, returning the zero hash alongside any error.
func hashOutputRootProof(version common.Hash, stateRoot common.Hash, messagePasserStorageRoot common.Hash, latestBlockHash common.Hash) (common.Hash, error) {
	proof := bindings.TypesOutputRootProof{
		Version:                  version,
		StateRoot:                stateRoot,
		MessagePasserStorageRoot: messagePasserStorageRoot,
		LatestBlockhash:          latestBlockHash,
	}
	outputRoot, err := rollup.ComputeL2OutputRoot(&proof)
	if err != nil {
		return common.Hash{}, err
	}
	return common.Hash(outputRoot), nil
}
// makeDepositTx creates a deposit transaction type from the given fields.
// The deposit source hash is derived from the L1 block hash and log index;
// Mint is only populated for a strictly positive mint amount, and To is left
// nil for contract-creation deposits.
func makeDepositTx(
	from common.Address,
	to common.Address,
	value *big.Int,
	mint *big.Int,
	gasLimit *big.Int,
	isCreate bool,
	data []byte,
	l1BlockHash common.Hash,
	logIndex *big.Int,
) types.DepositTx {
	// Derive the deposit transaction's source hash.
	source := derive.UserDepositSource{
		L1BlockHash: l1BlockHash,
		LogIndex:    logIndex.Uint64(),
	}
	tx := types.DepositTx{
		SourceHash:          source.SourceHash(),
		From:                from,
		Value:               value,
		Gas:                 gasLimit.Uint64(),
		IsSystemTransaction: false, // This will never be a system transaction in the tests.
		Data:                data,
	}
	// Only set the optional Mint field for a positive mint amount.
	if mint.Sign() > 0 {
		tx.Mint = mint
	}
	// Contract creations leave To nil.
	if !isCreate {
		tx.To = &to
	}
	return tx
}
// proofList accumulates generated trie proof nodes in insertion order.
// It provides the Put/Delete writer surface that trie.Prove expects.
type proofList [][]byte

// Put records a proof node; the key is intentionally ignored, only the
// node bytes are collected.
func (p *proofList) Put(key []byte, value []byte) error {
	*p = append(*p, value)
	return nil
}

// Delete is not meaningful for proof collection and always panics.
func (p *proofList) Delete(key []byte) error {
	panic("not supported")
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment