Commit da09a10d authored by Matthew Slipper, committed by GitHub

Merge pull request #2234 from ethereum-optimism/feat/zlib-compression

feat: typed batches and batch compression
parents d0853b12 b1697ad3
---
'@eth-optimism/integration-tests': patch
---
Add test coverage for zlib-compressed batches
---
'@eth-optimism/data-transport-layer': patch
---
Enable typed batch support
---
'@eth-optimism/batch-submitter': patch
---
Update to allow for zlib-compressed batches
---
'@eth-optimism/batch-submitter-service': patch
---
Enable the usage of typed batches and type 0 zlib-compressed batches
---
'@eth-optimism/core-utils': patch
---
Update batch serialization with typed batches and zlib compression
@@ -14,3 +14,4 @@ l2geth/signer/fourbyte
l2geth/cmd/puppeth
l2geth/cmd/clef
go/gas-oracle/gas-oracle
go/batch-submitter/batch-submitter
@@ -20,10 +20,15 @@ jobs:
- 5000:5000
strategy:
matrix:
batch-submitter:
- ts-batch-submitter
- go-batch-submitter
batch-type:
- zlib
- legacy
env:
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
steps:
# Monorepo tests
- uses: actions/checkout@v2
@@ -40,6 +45,10 @@ jobs:
restore-keys: |
${{ runner.os }}-yarn-
- name: Set conditional env vars
run: |
echo "BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE=${{ matrix.batch-type }}" >> $GITHUB_ENV
- name: Bring the stack up
working-directory: ./ops
run: |
...
@@ -125,6 +125,7 @@ func Main(gitVersion string) func(ctx *cli.Context) error {
CTCAddr: ctcAddress,
ChainID: chainID,
PrivKey: sequencerPrivKey,
BatchType: sequencer.BatchTypeFromString(cfg.SequencerBatchType),
})
if err != nil {
return err
...
@@ -33,6 +33,11 @@ var (
ErrSameSequencerAndProposerPrivKey = errors.New("sequencer-priv-key and " +
"proposer-priv-key must be distinct")
// ErrInvalidBatchType signals that an unsupported batch type is being
// configured. The default is "legacy" and the options are "legacy" or
// "zlib"
ErrInvalidBatchType = errors.New("invalid batch type")
// ErrSentryDSNNotSet signals that no Data Source Name was provided
// with which to configure Sentry logging.
ErrSentryDSNNotSet = errors.New("sentry-dsn must be set if use-sentry " +
@@ -164,6 +169,9 @@ type Config struct {
// the proposer transactions.
ProposerHDPath string
// SequencerBatchType represents the type of batch the sequencer submits.
SequencerBatchType string
// MetricsServerEnable if true, will create a metrics client and log to
// Prometheus.
MetricsServerEnable bool
@@ -212,6 +220,7 @@ func NewConfig(ctx *cli.Context) (Config, error) {
Mnemonic: ctx.GlobalString(flags.MnemonicFlag.Name),
SequencerHDPath: ctx.GlobalString(flags.SequencerHDPathFlag.Name),
ProposerHDPath: ctx.GlobalString(flags.ProposerHDPathFlag.Name),
SequencerBatchType: ctx.GlobalString(flags.SequencerBatchType.Name),
MetricsServerEnable: ctx.GlobalBool(flags.MetricsServerEnableFlag.Name),
MetricsHostname: ctx.GlobalString(flags.MetricsHostnameFlag.Name),
MetricsPort: ctx.GlobalUint64(flags.MetricsPortFlag.Name),
@@ -265,6 +274,12 @@ func ValidateConfig(cfg *Config) error {
return ErrSameSequencerAndProposerPrivKey
}
usingTypedBatches := cfg.SequencerBatchType != ""
validBatchType := cfg.SequencerBatchType == "legacy" || cfg.SequencerBatchType == "zlib"
if usingTypedBatches && !validBatchType {
return ErrInvalidBatchType
}
// Ensure the Sentry Data Source Name is set when using Sentry.
if cfg.SentryEnable && cfg.SentryDsn == "" {
return ErrSentryDSNNotSet
...
@@ -78,6 +78,7 @@ func GenSequencerBatchParams(
shouldStartAtElement uint64,
blockOffset uint64,
batch []BatchElement,
batchType BatchType,
) (*AppendSequencerBatchParams, error) {
var (
@@ -188,5 +189,6 @@ func GenSequencerBatchParams(
TotalElementsToAppend: uint64(len(batch)),
Contexts: contexts,
Txs: txs,
Type: batchType,
}, nil
}
@@ -36,6 +36,7 @@ type Config struct {
CTCAddr common.Address
ChainID *big.Int
PrivKey *ecdsa.PrivateKey
BatchType BatchType
}
type Driver struct {
@@ -160,7 +161,7 @@ func (d *Driver) CraftBatchTx(
name := d.cfg.Name
log.Info(name+" crafting batch tx", "start", start, "end", end,
"nonce", nonce, "type", d.cfg.BatchType.String())
var (
batchElements []BatchElement
@@ -195,7 +196,7 @@ func (d *Driver) CraftBatchTx(
var pruneCount int
for {
batchParams, err := GenSequencerBatchParams(
shouldStartAt, d.cfg.BlockOffset, batchElements, d.cfg.BatchType,
)
if err != nil {
return nil, err
...
package sequencer
import (
"bufio"
"bytes"
"compress/zlib"
"encoding/binary"
"errors"
"fmt"
"io"
"math"
@@ -17,7 +20,13 @@ const (
TxLenSize = 3
)
var (
// byteOrder represents the endianness used for batch serialization
byteOrder = binary.BigEndian
// ErrMalformedBatch represents a batch that is not well formed
// according to the protocol specification
ErrMalformedBatch = errors.New("malformed batch")
)
// BatchContext denotes a range of transactions that belong to the same batch. It
// is used to compress shared fields that would otherwise be repeated for each
@@ -44,11 +53,14 @@ type BatchContext struct {
// - num_subsequent_queue_txs: 3 bytes
// - timestamp: 5 bytes
// - block_number: 5 bytes
//
// Note that writing to a bytes.Buffer cannot
// error, so errors are ignored here
func (c *BatchContext) Write(w *bytes.Buffer) {
_ = writeUint64(w, c.NumSequencedTxs, 3)
_ = writeUint64(w, c.NumSubsequentQueueTxs, 3)
_ = writeUint64(w, c.Timestamp, 5)
_ = writeUint64(w, c.BlockNumber, 5)
}
// Read decodes the BatchContext from the passed reader. If fewer than 16 bytes
@@ -71,6 +83,45 @@ func (c *BatchContext) Read(r io.Reader) error {
return readUint64(r, &c.BlockNumber, 5)
}
// BatchType represents the type of batch being
// submitted. When the first context in the batch
// has a timestamp of 0, the blocknumber is interpreted
// as an enum that represents the type
type BatchType int8
// String implements the Stringer interface for BatchType
func (b BatchType) String() string {
switch b {
case BatchTypeLegacy:
return "LEGACY"
case BatchTypeZlib:
return "ZLIB"
default:
return ""
}
}
// BatchTypeFromString returns the BatchType
// enum based on a human readable string
func BatchTypeFromString(s string) BatchType {
switch s {
case "zlib", "ZLIB":
return BatchTypeZlib
case "legacy", "LEGACY":
return BatchTypeLegacy
default:
return BatchTypeLegacy
}
}
const (
// BatchTypeLegacy represents the legacy batch type
BatchTypeLegacy BatchType = -1
// BatchTypeZlib represents a batch type where the
// transaction data is compressed using zlib
BatchTypeZlib BatchType = 0
)
// AppendSequencerBatchParams holds the raw data required to submit a batch of
// L2 txs to the L1 CTC contract. Rather than encoding the objects using the
// standard ABI encoding, a custom encoding is provided in the call data to
@@ -95,6 +146,9 @@ type AppendSequencerBatchParams struct {
// Txs contains all sequencer txs that will be recorded in the L1 CTC
// contract.
Txs []*CachedTx
// The type of the batch
Type BatchType
}
// Write encodes the AppendSequencerBatchParams using the following format:
@@ -105,20 +159,73 @@ type AppendSequencerBatchParams struct {
// - [num txs omitted]
// - tx_len: 3 bytes
// - tx_bytes: tx_len bytes
//
// Typed batches include a dummy context as the first context
// where the timestamp is 0. The blocknumber is interpreted
// as an enum that defines the type. It is impossible to have
// a timestamp of 0 in practice, so this can safely indicate
// that the batch is typed.
//
// Type 0 batches have a dummy context where the blocknumber is
// set to 0. The transaction data is compressed with zlib before
// submitting the transaction to the chain. The fields should_start_at_element,
// total_elements_to_append, num_contexts and the contexts themselves
// are not altered.
//
// Note that writing to a bytes.Buffer cannot
// error, so errors are ignored here
func (p *AppendSequencerBatchParams) Write(w *bytes.Buffer) error {
_ = writeUint64(w, p.ShouldStartAtElement, 5)
_ = writeUint64(w, p.TotalElementsToAppend, 3)
// There must be contexts if there are transactions
if len(p.Contexts) == 0 && len(p.Txs) != 0 {
return ErrMalformedBatch
}
// There must be transactions if there are contexts
if len(p.Txs) == 0 && len(p.Contexts) != 0 {
return ErrMalformedBatch
}
// copy the contexts so as not to mutate the struct
// when it is a typed batch
contexts := make([]BatchContext, 0, len(p.Contexts)+1)
if p.Type == BatchTypeZlib {
// All zero values for the single batch context
// is desired here as blocknumber 0 means it is a zlib batch
contexts = append(contexts, BatchContext{})
}
contexts = append(contexts, p.Contexts...)
// Write number of contexts followed by each fixed-size BatchContext.
_ = writeUint64(w, uint64(len(contexts)), 3)
for _, context := range contexts {
context.Write(w)
}
switch p.Type {
case BatchTypeLegacy:
// Write each length-prefixed tx.
for _, tx := range p.Txs {
_ = writeUint64(w, uint64(tx.Size()), TxLenSize)
_, _ = w.Write(tx.RawTx()) // can't fail for bytes.Buffer
}
case BatchTypeZlib:
zw := zlib.NewWriter(w)
for _, tx := range p.Txs {
if err := writeUint64(zw, uint64(tx.Size()), TxLenSize); err != nil {
return err
}
if _, err := zw.Write(tx.RawTx()); err != nil {
return err
}
}
if err := zw.Close(); err != nil {
return err
}
default:
return fmt.Errorf("Unknown batch type: %s", p.Type)
}
return nil
@@ -159,6 +266,8 @@ func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
return err
}
// Ensure that contexts is never nil
p.Contexts = make([]BatchContext, 0)
for i := uint64(0); i < numContexts; i++ {
var batchContext BatchContext
if err := batchContext.Read(r); err != nil {
@@ -168,14 +277,44 @@ func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
p.Contexts = append(p.Contexts, batchContext)
}
// Assume that it is a legacy batch at first
p.Type = BatchTypeLegacy
// Handle backwards compatible batch types
if len(p.Contexts) > 0 && p.Contexts[0].Timestamp == 0 {
switch p.Contexts[0].BlockNumber {
case 0:
// zlib compressed transaction data
p.Type = BatchTypeZlib
// remove the first dummy context
p.Contexts = p.Contexts[1:]
numContexts--
zr, err := zlib.NewReader(r)
if err != nil {
return err
}
defer zr.Close()
r = bufio.NewReader(zr)
}
}
// Deserialize any transactions. Since the number of txs is omitted
// from the encoding, loop until the stream is consumed.
for {
var txLen uint64
err := readUint64(r, &txLen, TxLenSize)
// Getting an EOF when reading the txLen is expected for a cleanly
// encoded object. Silence the error and return success if
// the batch is well formed.
if err == io.EOF {
if len(p.Contexts) == 0 && len(p.Txs) != 0 {
return ErrMalformedBatch
}
if len(p.Txs) == 0 && len(p.Contexts) != 0 {
return ErrMalformedBatch
}
return nil
} else if err != nil {
return err
@@ -188,10 +327,11 @@ func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
p.Txs = append(p.Txs, NewCachedTx(tx))
}
}
// writeUint64 writes the bottom `n` bytes of `val` to `w`.
func writeUint64(w io.Writer, val uint64, n uint) error {
if n < 1 || n > 8 {
panic(fmt.Sprintf("invalid number of bytes %d must be 1-8", n))
}
@@ -204,7 +344,8 @@ func writeUint64(w *bytes.Buffer, val uint64, n uint) {
var buf [8]byte
byteOrder.PutUint64(buf[:], val)
_, err := w.Write(buf[8-n:])
return err
}
// readUint64 reads `n` bytes from `r` and returns them in the lower `n` bytes
...
@@ -65,167 +65,21 @@ type AppendSequencerBatchParamsTest struct {
TotalElementsToAppend uint64 `json:"total_elements_to_append"`
Contexts []sequencer.BatchContext `json:"contexts"`
Txs []string `json:"txs"`
Error bool `json:"error"`
}
var appendSequencerBatchParamTests = AppendSequencerBatchParamsTestCases{
Tests: []AppendSequencerBatchParamsTest{
{
Name: "empty batch",
HexEncoding: "0000000000000000" +
"000000",
ShouldStartAtElement: 0,
TotalElementsToAppend: 0,
Contexts: nil,
Txs: nil,
},
{
Name: "single tx",
HexEncoding: "0000000001000001" +
"000000" +
"00000ac9808080808080808080",
ShouldStartAtElement: 1,
TotalElementsToAppend: 1,
Contexts: nil,
Txs: []string{
"c9808080808080808080",
},
},
{
Name: "multiple txs",
HexEncoding: "0000000001000004" +
"000000" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080",
ShouldStartAtElement: 1,
TotalElementsToAppend: 4,
Contexts: nil,
Txs: []string{
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
},
},
{
Name: "single context",
HexEncoding: "0000000001000000" +
"000001" +
"000102030405060708090a0b0c0d0e0f",
ShouldStartAtElement: 1,
TotalElementsToAppend: 0,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: nil,
},
{
Name: "multiple contexts",
HexEncoding: "0000000001000000" +
"000004" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f",
ShouldStartAtElement: 1,
TotalElementsToAppend: 0,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: nil,
},
{
Name: "complex",
HexEncoding: "0102030405060708" +
"000004" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080",
ShouldStartAtElement: 0x0102030405,
TotalElementsToAppend: 0x060708,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: []string{
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
},
},
},
}
var appendSequencerBatchParamTests = AppendSequencerBatchParamsTestCases{}
// TestAppendSequencerBatchParamsEncodeDecodeMatchesJSON ensures that the
// in-memory test vectors for valid encode/decode stay in sync with the JSON
// version.
func TestAppendSequencerBatchParamsEncodeDecodeMatchesJSON(t *testing.T) {
t.Parallel()
jsonBytes, err := json.MarshalIndent(appendSequencerBatchParamTests, "", "\t")
require.Nil(t, err)
data, err := os.ReadFile("./testdata/valid_append_sequencer_batch_params.json")
require.Nil(t, err)
require.Equal(t, jsonBytes, data)
}
func init() {
data, err := os.ReadFile("./testdata/valid_append_sequencer_batch_params.json")
if err != nil {
panic(err)
}
err = json.Unmarshal(data, &appendSequencerBatchParamTests)
if err != nil {
panic(err)
}
}
// TestAppendSequencerBatchParamsEncodeDecode asserts the proper encoding and
@@ -265,6 +119,7 @@ func testAppendSequencerBatchParamsEncodeDecode(
TotalElementsToAppend: test.TotalElementsToAppend,
Contexts: test.Contexts,
Txs: nil,
Type: sequencer.BatchTypeLegacy,
}
// Decode the batch from the test string.
@@ -273,7 +128,12 @@ func testAppendSequencerBatchParamsEncodeDecode(
var params sequencer.AppendSequencerBatchParams
err = params.Read(bytes.NewReader(rawBytes))
if test.Error {
require.ErrorIs(t, err, sequencer.ErrMalformedBatch)
} else {
require.Nil(t, err)
}
require.Equal(t, params.Type, sequencer.BatchTypeLegacy)
// Assert that the decoded params match the expected params. The
// transactions are compared separately (via hash), since the internal
@@ -290,8 +150,34 @@ func testAppendSequencerBatchParamsEncodeDecode(
// Finally, encode the decoded object and assert it matches the original
// hex string.
paramsBytes, err := params.Serialize()
// Return early when testing error cases, no need to reserialize again
if test.Error {
require.ErrorIs(t, err, sequencer.ErrMalformedBatch)
return
}
require.Nil(t, err)
require.Equal(t, test.HexEncoding, hex.EncodeToString(paramsBytes))
// Serialize the batches in compressed form
params.Type = sequencer.BatchTypeZlib
compressedParamsBytes, err := params.Serialize()
require.Nil(t, err)
// Deserialize the compressed batch
var paramsCompressed sequencer.AppendSequencerBatchParams
err = paramsCompressed.Read(bytes.NewReader(compressedParamsBytes))
require.Nil(t, err)
require.Equal(t, paramsCompressed.Type, sequencer.BatchTypeZlib)
expParams.Type = sequencer.BatchTypeZlib
decompressedTxs := paramsCompressed.Txs
paramsCompressed.Txs = nil
require.Equal(t, expParams, paramsCompressed)
compareTxs(t, expTxs, decompressedTxs)
paramsCompressed.Txs = decompressedTxs
} }
// compareTxs compares two lists of transactions, testing each pair by tx hash.
...
@@ -194,6 +194,12 @@ var (
"mnemonic. The mnemonic flag must also be set.",
EnvVar: prefixEnvVar("PROPOSER_HD_PATH"),
}
SequencerBatchType = cli.StringFlag{
Name: "sequencer-batch-type",
Usage: "The type of sequencer batch to be submitted. Valid arguments are legacy or zlib.",
Value: "legacy",
EnvVar: prefixEnvVar("SEQUENCER_BATCH_TYPE"),
}
MetricsServerEnableFlag = cli.BoolFlag{
Name: "metrics-server-enable",
Usage: "Whether or not to run the embedded metrics server",
@@ -245,6 +251,7 @@ var optionalFlags = []cli.Flag{
SentryDsnFlag,
SentryTraceRateFlag,
BlockOffsetFlag,
SequencerBatchType,
SequencerPrivateKeyFlag,
ProposerPrivateKeyFlag,
MnemonicFlag,
...
@@ -108,6 +108,9 @@ const procEnv = cleanEnv(process.env, {
MOCHA_BAIL: bool({
default: false,
}),
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: str({
default: 'zlib',
}),
})
export const envConfig = procEnv
...
import { SequencerBatch, BatchType } from '@eth-optimism/core-utils'
import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env'
import { envConfig } from './shared/utils'
describe('Batch Serialization', () => {
let env: OptimismEnv
// Allow for each type to be tested. The env var here must be
// the same value that is passed to the batch submitter
const batchType = envConfig.BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE.toUpperCase()
before(async () => {
env = await OptimismEnv.new()
})
it('should fetch batches', async () => {
const tip = await env.l1Provider.getBlockNumber()
const logs = await env.ctc.queryFilter(
env.ctc.filters.TransactionBatchAppended(),
0,
tip
)
// collect all of the batches
const batches = []
for (const log of logs) {
const tx = await env.l1Provider.getTransaction(log.transactionHash)
batches.push(tx.data)
}
expect(batches.length).to.be.gt(0, 'Submit some batches first')
let latest = 0
// decode all of the batches
for (const batch of batches) {
// Typings don't work?
const decoded = (SequencerBatch as any).fromHex(batch)
expect(decoded.type).to.eq(BatchType[batchType])
// Iterate over all of the transactions, fetch them
// by hash and make sure their blocknumbers are in
// ascending order. This lets us skip handling deposits here
for (const transaction of decoded.transactions) {
const tx = transaction.toTransaction()
const got = await env.l2Provider.getTransaction(tx.hash)
expect(got).to.not.eq(null)
expect(got.blockNumber).to.be.gt(latest)
latest = got.blockNumber
}
}
})
})
@@ -11,8 +11,9 @@ services:
env_file:
- ./envs/batch-submitter.env
environment:
L1_ETH_RPC: http://l1_chain:8545
L2_ETH_RPC: http://l2geth:8545
URL: http://deployer:8081/addresses.json
BATCH_SUBMITTER_SEQUENCER_PRIVATE_KEY: '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d'
BATCH_SUBMITTER_PROPOSER_PRIVATE_KEY: '0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a'
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
@@ -12,8 +12,9 @@ services:
env_file:
- ./envs/batches.env
environment:
L1_NODE_WEB3_URL: http://l1_chain:8545
L2_NODE_WEB3_URL: http://l2geth:8545
URL: http://deployer:8081/addresses.json
SEQUENCER_PRIVATE_KEY: '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d'
PROPOSER_PRIVATE_KEY: '0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a'
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
version: "3.4" version: '3.4'
services: services:
# this is a helper service used because there's no official hardhat image # this is a helper service used because there's no official hardhat image
...@@ -8,9 +8,9 @@ services: ...@@ -8,9 +8,9 @@ services:
context: ./docker/hardhat context: ./docker/hardhat
dockerfile: Dockerfile dockerfile: Dockerfile
env_file: env_file:
- ./envs/l1_chain.env - ./envs/l1_chain.env
ports: ports:
# expose the service to the host for integration testing # expose the service to the host for integration testing
- ${L1CHAIN_HTTP_PORT:-9545}:8545 - ${L1CHAIN_HTTP_PORT:-9545}:8545
deployer: deployer:
...@@ -24,11 +24,11 @@ services: ...@@ -24,11 +24,11 @@ services:
environment: environment:
# Env vars for the deployment script. # Env vars for the deployment script.
CONTRACTS_RPC_URL: http://l1_chain:8545 CONTRACTS_RPC_URL: http://l1_chain:8545
CONTRACTS_DEPLOYER_KEY: "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" CONTRACTS_DEPLOYER_KEY: 'ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
CONTRACTS_TARGET_NETWORK: "custom" CONTRACTS_TARGET_NETWORK: 'custom'
OVM_ADDRESS_MANAGER_OWNER: "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266" OVM_ADDRESS_MANAGER_OWNER: '0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266'
OVM_PROPOSER_ADDRESS: "0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc" OVM_PROPOSER_ADDRESS: '0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc'
OVM_SEQUENCER_ADDRESS: "0x70997970c51812dc3a010c7d01b50e0d17dc79c8" OVM_SEQUENCER_ADDRESS: '0x70997970c51812dc3a010c7d01b50e0d17dc79c8'
SCC_FRAUD_PROOF_WINDOW: 0 SCC_FRAUD_PROOF_WINDOW: 0
NUM_DEPLOY_CONFIRMATIONS: 0 NUM_DEPLOY_CONFIRMATIONS: 0
# skip compilation when run in docker-compose, since the contracts # skip compilation when run in docker-compose, since the contracts
...@@ -37,20 +37,20 @@ services: ...@@ -37,20 +37,20 @@ services:
# Env vars for the dump script. # Env vars for the dump script.
# Default hardhat account 5 # Default hardhat account 5
GAS_PRICE_ORACLE_OWNER: "0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc" GAS_PRICE_ORACLE_OWNER: '0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc'
# setting the whitelist owner to address(0) disables the whitelist # setting the whitelist owner to address(0) disables the whitelist
WHITELIST_OWNER: "0x0000000000000000000000000000000000000000" WHITELIST_OWNER: '0x0000000000000000000000000000000000000000'
L1_FEE_WALLET_ADDRESS: "0x391716d440c151c42cdf1c95c1d83a5427bca52c" L1_FEE_WALLET_ADDRESS: '0x391716d440c151c42cdf1c95c1d83a5427bca52c'
L2_CHAIN_ID: 420 L2_CHAIN_ID: 420
L2_BLOCK_GAS_LIMIT: 15000000 L2_BLOCK_GAS_LIMIT: 15000000
BLOCK_SIGNER_ADDRESS: "0x00000398232E2064F896018496b4b44b3D62751F" BLOCK_SIGNER_ADDRESS: '0x00000398232E2064F896018496b4b44b3D62751F'
GAS_PRICE_ORACLE_OVERHEAD: 2750 GAS_PRICE_ORACLE_OVERHEAD: 2750
GAS_PRICE_ORACLE_SCALAR: 1500000 GAS_PRICE_ORACLE_SCALAR: 1500000
GAS_PRICE_ORACLE_L1_BASE_FEE: 1 GAS_PRICE_ORACLE_L1_BASE_FEE: 1
GAS_PRICE_ORACLE_GAS_PRICE: 1 GAS_PRICE_ORACLE_GAS_PRICE: 1
GAS_PRICE_ORACLE_DECIMALS: 6 GAS_PRICE_ORACLE_DECIMALS: 6
ports: ports:
# expose the service to the host for getting the contract addrs # expose the service to the host for getting the contract addrs
- ${DEPLOYER_PORT:-8080}:8081 - ${DEPLOYER_PORT:-8080}:8081
dtl: dtl:
...@@ -65,17 +65,17 @@ services: ...@@ -65,17 +65,17 @@ services:
# override with the dtl script and the env vars required for it # override with the dtl script and the env vars required for it
entrypoint: ./dtl.sh entrypoint: ./dtl.sh
env_file: env_file:
- ./envs/dtl.env - ./envs/dtl.env
# set the rest of the env vars for the network whcih do not # set the rest of the env vars for the network whcih do not
# depend on the docker-compose setup # depend on the docker-compose setup
environment: environment:
# used for setting the address manager address # used for setting the address manager address
URL: http://deployer:8081/addresses.json URL: http://deployer:8081/addresses.json
# connect to the 2 layers # connect to the 2 layers
DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT: http://l1_chain:8545 DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT: http://l1_chain:8545
DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT: http://l2geth:8545 DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT: http://l2geth:8545
DATA_TRANSPORT_LAYER__SYNC_FROM_L2: 'true' DATA_TRANSPORT_LAYER__SYNC_FROM_L2: 'true'
DATA_TRANSPORT_LAYER__L2_CHAIN_ID: 420 DATA_TRANSPORT_LAYER__L2_CHAIN_ID: 420
ports: ports:
- ${DTL_PORT:-7878}:7878 - ${DTL_PORT:-7878}:7878
...@@ -91,20 +91,20 @@ services: ...@@ -91,20 +91,20 @@ services:
env_file: env_file:
- ./envs/geth.env - ./envs/geth.env
environment: environment:
ETH1_HTTP: http://l1_chain:8545 ETH1_HTTP: http://l1_chain:8545
ROLLUP_TIMESTAMP_REFRESH: 5s ROLLUP_TIMESTAMP_REFRESH: 5s
ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json
# connecting to the DTL # connecting to the DTL
ROLLUP_CLIENT_HTTP: http://dtl:7878 ROLLUP_CLIENT_HTTP: http://dtl:7878
ETH1_CTC_DEPLOYMENT_HEIGHT: 8 ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60 RETRIES: 60
# no need to keep this secret, only used internally to sign blocks # no need to keep this secret, only used internally to sign blocks
BLOCK_SIGNER_KEY: "6587ae678cf4fc9a33000cdbf9f35226b71dcc6a4684a31203241f9bcfd55d27" BLOCK_SIGNER_KEY: '6587ae678cf4fc9a33000cdbf9f35226b71dcc6a4684a31203241f9bcfd55d27'
BLOCK_SIGNER_ADDRESS: "0x00000398232E2064F896018496b4b44b3D62751F" BLOCK_SIGNER_ADDRESS: '0x00000398232E2064F896018496b4b44b3D62751F'
ROLLUP_ENFORCE_FEES: ${ROLLUP_ENFORCE_FEES:-true} ROLLUP_ENFORCE_FEES: ${ROLLUP_ENFORCE_FEES:-true}
ROLLUP_FEE_THRESHOLD_DOWN: 0.9 ROLLUP_FEE_THRESHOLD_DOWN: 0.9
ROLLUP_FEE_THRESHOLD_UP: 1.1 ROLLUP_FEE_THRESHOLD_UP: 1.1
ports: ports:
- ${L2GETH_HTTP_PORT:-8545}:8545 - ${L2GETH_HTTP_PORT:-8545}:8545
- ${L2GETH_WS_PORT:-8546}:8546 - ${L2GETH_WS_PORT:-8546}:8546
...@@ -122,14 +122,14 @@ services: ...@@ -122,14 +122,14 @@ services:
target: relayer target: relayer
entrypoint: ./relayer.sh entrypoint: ./relayer.sh
environment: environment:
L1_NODE_WEB3_URL: http://l1_chain:8545 L1_NODE_WEB3_URL: http://l1_chain:8545
L2_NODE_WEB3_URL: http://l2geth:8545 L2_NODE_WEB3_URL: http://l2geth:8545
URL: http://deployer:8081/addresses.json URL: http://deployer:8081/addresses.json
# a funded hardhat account # a funded hardhat account
L1_WALLET_KEY: "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97" L1_WALLET_KEY: '0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97'
RETRIES: 60 RETRIES: 60
POLLING_INTERVAL: 500 POLLING_INTERVAL: 500
GET_LOGS_INTERVAL: 500 GET_LOGS_INTERVAL: 500
verifier: verifier:
depends_on: depends_on:
...@@ -146,14 +146,14 @@ services: ...@@ -146,14 +146,14 @@ services:
env_file: env_file:
- ./envs/geth.env - ./envs/geth.env
environment: environment:
ETH1_HTTP: http://l1_chain:8545 ETH1_HTTP: http://l1_chain:8545
SEQUENCER_CLIENT_HTTP: http://l2geth:8545 SEQUENCER_CLIENT_HTTP: http://l2geth:8545
ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json
ROLLUP_CLIENT_HTTP: http://dtl:7878 ROLLUP_CLIENT_HTTP: http://dtl:7878
ROLLUP_BACKEND: 'l1' ROLLUP_BACKEND: 'l1'
ETH1_CTC_DEPLOYMENT_HEIGHT: 8 ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60 RETRIES: 60
ROLLUP_VERIFIER_ENABLE: 'true' ROLLUP_VERIFIER_ENABLE: 'true'
ports: ports:
- ${VERIFIER_HTTP_PORT:-8547}:8545 - ${VERIFIER_HTTP_PORT:-8547}:8545
- ${VERIFIER_WS_PORT:-8548}:8546 - ${VERIFIER_WS_PORT:-8548}:8546
...@@ -171,21 +171,21 @@ services: ...@@ -171,21 +171,21 @@ services:
env_file: env_file:
- ./envs/geth.env - ./envs/geth.env
environment: environment:
ETH1_HTTP: http://l1_chain:8545 ETH1_HTTP: http://l1_chain:8545
SEQUENCER_CLIENT_HTTP: http://l2geth:8545 SEQUENCER_CLIENT_HTTP: http://l2geth:8545
ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json ROLLUP_STATE_DUMP_PATH: http://deployer:8081/state-dump.latest.json
ROLLUP_CLIENT_HTTP: http://dtl:7878 ROLLUP_CLIENT_HTTP: http://dtl:7878
ROLLUP_BACKEND: 'l2' ROLLUP_BACKEND: 'l2'
ROLLUP_VERIFIER_ENABLE: 'true' ROLLUP_VERIFIER_ENABLE: 'true'
ETH1_CTC_DEPLOYMENT_HEIGHT: 8 ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60 RETRIES: 60
ports: ports:
- ${REPLICA_HTTP_PORT:-8549}:8545 - ${REPLICA_HTTP_PORT:-8549}:8545
- ${REPLICA_WS_PORT:-8550}:8546 - ${REPLICA_WS_PORT:-8550}:8546
integration_tests: integration_tests:
deploy: deploy:
replicas: 0 replicas: 0
build: build:
context: .. context: ..
dockerfile: ./ops/docker/Dockerfile.packages dockerfile: ./ops/docker/Dockerfile.packages
...@@ -199,10 +199,11 @@ services: ...@@ -199,10 +199,11 @@ services:
URL: http://deployer:8081/addresses.json URL: http://deployer:8081/addresses.json
ENABLE_GAS_REPORT: 1 ENABLE_GAS_REPORT: 1
NO_NETWORK: 1 NO_NETWORK: 1
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
gas_oracle: gas_oracle:
deploy: deploy:
replicas: 0 replicas: 0
build: build:
context: .. context: ..
dockerfile: ./ops/docker/Dockerfile.gas-oracle dockerfile: ./ops/docker/Dockerfile.gas-oracle
...@@ -210,4 +211,4 @@ services: ...@@ -210,4 +211,4 @@ services:
environment: environment:
GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL: http://l2geth:8545 GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL: http://l2geth:8545
# Default hardhat account 5 # Default hardhat account 5
GAS_PRICE_ORACLE_PRIVATE_KEY: "0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba" GAS_PRICE_ORACLE_PRIVATE_KEY: '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba'
@@ -12,6 +12,7 @@ import {
BatchElement,
Batch,
QueueOrigin,
BatchType,
} from '@eth-optimism/core-utils'
import { Logger, Metrics } from '@eth-optimism/common-ts'
@@ -39,6 +40,7 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
private validateBatch: boolean
private transactionSubmitter: TransactionSubmitter
private gasThresholdInGwei: number
private batchType: BatchType
constructor(
signer: Signer,
@@ -61,7 +63,8 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
fixDoublePlayedDeposits: false,
fixMonotonicity: false,
fixSkippedDeposits: false,
}, // TODO: Remove this
batchType: string
) {
super(
signer,
@@ -84,9 +87,18 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
this.gasThresholdInGwei = gasThresholdInGwei
this.transactionSubmitter = transactionSubmitter
if (batchType === 'legacy') {
this.batchType = BatchType.LEGACY
} else if (batchType === 'zlib') {
this.batchType = BatchType.ZLIB
} else {
throw new Error(`Invalid batch type: ${batchType}`)
}
this.logger.info('Batch options', {
autoFixBatchOptions,
validateBatch,
batchType: BatchType[this.batchType],
})
}
@@ -295,6 +307,7 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
startBlock,
batch
)
let wasBatchTruncated = false
let encoded = encodeAppendSequencerBatch(sequencerBatchParams)
while (encoded.length / 2 > this.maxTxSize) {
@@ -313,10 +326,14 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
wasBatchTruncated = true
}
// Set the batch type so that it is serialized correctly
sequencerBatchParams.type = this.batchType
this.logger.info('Generated sequencer batch params', {
contexts: sequencerBatchParams.contexts,
transactions: sequencerBatchParams.transactions,
wasBatchTruncated,
type: BatchType[sequencerBatchParams.type],
})
return [sequencerBatchParams, wasBatchTruncated]
}
...
@@ -250,6 +250,11 @@ export const run = async () => {
env.VALIDATE_TX_BATCH ? env.VALIDATE_TX_BATCH === 'true' : false
)
const SEQUENCER_BATCH_TYPE = config.str(
'sequencer-batch-type',
env.SEQUENCER_BATCH_TYPE || 'legacy'
)
// Auto fix batch options -- TODO: Remove this very hacky config
const AUTO_FIX_BATCH_OPTIONS_CONF = config.str(
'auto-fix-batch-conf',
@@ -402,7 +407,8 @@ export const run = async () => {
VALIDATE_TX_BATCH,
logger.child({ name: TX_BATCH_SUBMITTER_LOG_TAG }),
metrics,
autoFixBatchOptions,
SEQUENCER_BATCH_TYPE
)
const stateBatchTxSubmitter: TransactionSubmitter =
...
@@ -4,12 +4,11 @@ import {
TransactionResponse,
TransactionRequest,
} from '@ethersproject/abstract-provider'
import { keccak256 } from 'ethers/lib/utils'
import {
AppendSequencerBatchParams,
BatchContext,
encodeAppendSequencerBatch,
remove0x,
sequencerBatch,
} from '@eth-optimism/core-utils'
export { encodeAppendSequencerBatch, BatchContext, AppendSequencerBatchParams }
@@ -52,10 +51,6 @@ export class CanonicalTransactionChainContract extends Contract {
* Internal Functions *
*********************/
const APPEND_SEQUENCER_BATCH_METHOD_ID = keccak256(
Buffer.from('appendSequencerBatch()')
).slice(2, 10)
const appendSequencerBatch = async (
OVM_CanonicalTransactionChain: Contract,
batch: AppendSequencerBatchParams,
@@ -68,8 +63,6 @@ const appendSequencerBatch = async (
})
}
const getEncodedCalldata = (batch: AppendSequencerBatchParams): string => {
const methodId = APPEND_SEQUENCER_BATCH_METHOD_ID
const calldata = encodeAppendSequencerBatch(batch)
return '0x' + remove0x(methodId) + remove0x(calldata)
}
const getEncodedCalldata = (params: AppendSequencerBatchParams): string => {
return sequencerBatch.encode(params)
}
@@ -226,7 +226,13 @@ describe('BatchSubmitter', () => {
1,
false,
new Logger({ name: TX_BATCH_SUBMITTER_LOG_TAG }),
testMetrics,
{
fixDoublePlayedDeposits: false,
fixMonotonicity: false,
fixSkippedDeposits: false,
},
'legacy'
)
}
...
@@ -49,7 +49,7 @@ task('set-l2-gasprice')
const GasPriceOracle = new ethers.Contract(
predeploys.OVM_GasPriceOracle,
GasPriceOracleArtifact.abi,
signer
)
const addr = await signer.getAddress()
...
@@ -35,7 +35,9 @@
"@ethersproject/abstract-provider": "^5.5.1",
"@ethersproject/bytes": "^5.5.0",
"@ethersproject/providers": "^5.5.3",
"@ethersproject/transactions": "^5.5.0",
"@ethersproject/web": "^5.5.1", "@ethersproject/web": "^5.5.1",
"bufio": "^1.0.7",
"chai": "^4.3.4", "chai": "^4.3.4",
"ethers": "^5.5.4" "ethers": "^5.5.4"
}, },
......
declare module 'bufio' {
class BufferWriter {
public offset: number
constructor()
render(): Buffer
getSize(): number
seek(offset: number): this
destroy(): this
writeU8(n: number): this
writeU16(n: number): this
writeU16BE(n: number): this
writeU24(n: number): this
writeU24BE(n: number): this
writeU32(n: number): this
writeU32BE(n: number): this
writeU40(n: number): this
writeU40BE(n: number): this
writeU48(n: number): this
writeU48BE(n: number): this
writeU56(n: number): this
writeU56BE(n: number): this
writeU64(n: number): this
writeU64BE(n: number): this
writeBytes(b: Buffer): this
copy(value: number, start: number, end: number): this
}
class BufferReader {
constructor(data: Buffer, copy?: boolean)
getSize(): number
check(n: number): void
left(): number
seek(offset: number): this
start(): number
end(): number
destroy(): this
readU8(): number
readU16(): number
readU16BE(): number
readU24(): number
readU24BE(): number
readU32(): number
readU32BE(): number
readU40(): number
readU40BE(): number
readU48(): number
readU48BE(): number
readU56(): number
readU56BE(): number
readU64(): number
readU64BE(): number
readBytes(size: number, copy?: boolean): Buffer
}
class Struct {
constructor()
encode(extra?: object): Buffer
decode<T extends Struct>(data: Buffer, extra?: object): T
getSize(extra?: object): number
fromHex(s: string, extra?: object): this
toHex(): string
write(bw: BufferWriter, extra?: object): BufferWriter
read(br: BufferReader, extra?: object): this
static read<T extends Struct>(br: BufferReader, extra?: object): T
static decode<T extends Struct>(data: Buffer, extra?: object): T
static fromHex<T extends Struct>(s: string, extra?: object): T
}
}
@@ -7,23 +7,31 @@ import {
encodeAppendSequencerBatch,
decodeAppendSequencerBatch,
sequencerBatch,
BatchType,
SequencerBatch,
} from '../src'
describe('BatchEncoder', function () {
this.timeout(10_000)
// eslint-disable-next-line @typescript-eslint/no-var-requires
const data = require('./fixtures/calldata.json')
describe('appendSequencerBatch', () => {
it('legacy: should work with the simple case', () => {
const batch = {
shouldStartAtElement: 0,
totalElementsToAppend: 0,
contexts: [],
transactions: [],
type: BatchType.LEGACY,
}
const encoded = encodeAppendSequencerBatch(batch)
const decoded = decodeAppendSequencerBatch(encoded)
expect(decoded).to.deep.equal(batch)
})
it('legacy: should work with more complex case', () => {
const batch = {
shouldStartAtElement: 10,
totalElementsToAppend: 1,
@@ -36,19 +44,57 @@ describe('BatchEncoder', () => {
},
],
transactions: ['0x45423400000011', '0x45423400000012'],
type: BatchType.LEGACY,
}
const encoded = encodeAppendSequencerBatch(batch)
const decoded = decodeAppendSequencerBatch(encoded)
expect(decoded).to.deep.equal(batch)
})
it('should work with mainnet calldata', () => {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const data = require('./fixtures/appendSequencerBatch.json')
for (const calldata of data.calldata) {
const decoded = sequencerBatch.decode(calldata)
const encoded = sequencerBatch.encode(decoded)
expect(encoded).to.equal(calldata)
describe('mainnet data', () => {
for (const [hash, calldata] of Object.entries(data)) {
// Deserialize the raw calldata
const decoded = SequencerBatch.fromHex<SequencerBatch>(
calldata as string
)
it(`${hash}`, () => {
const encoded = decoded.toHex()
expect(encoded).to.deep.equal(calldata)
const batch = SequencerBatch.decode(decoded.encode())
expect(decoded).to.deep.eq(batch)
})
it(`${hash} (compressed)`, () => {
// Set the batch type to be zlib so that the batch
// is compressed
decoded.type = BatchType.ZLIB
// Encode a compressed batch
const encodedCompressed = decoded.encode()
// Decode a compressed batch
const decodedPostCompressed =
SequencerBatch.decode<SequencerBatch>(encodedCompressed)
// Expect that the batch type is detected
expect(decodedPostCompressed.type).to.eq(BatchType.ZLIB)
// Expect that the contexts match
expect(decoded.contexts).to.deep.equal(decodedPostCompressed.contexts)
for (const [i, tx] of decoded.transactions.entries()) {
const got = decodedPostCompressed.transactions[i]
expect(got).to.deep.eq(tx)
}
// Reserialize the batch as legacy
decodedPostCompressed.type = BatchType.LEGACY
// Ensure that the original data can be recovered
const encoded = decodedPostCompressed.toHex()
expect(encoded).to.deep.equal(calldata)
})
it(`${hash}: serialize txs`, () => {
for (const tx of decoded.transactions) {
tx.toTransaction()
}
})
}
})
...
{
"extends": "../../tsconfig.json",
"typeRoots": ["node_modules/@types", "src/@types"]
}
/* Imports: External */
import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers'
import { BatchType } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { SimpleDB } from './simple-db'
@@ -127,7 +128,14 @@ export class TransportDB {
public async getTransactionBatchByIndex(
index: number
): Promise<TransactionBatchEntry> {
const entry = (await this._getEntryByIndex(
TRANSPORT_DB_KEYS.TRANSACTION_BATCH,
index
)) as TransactionBatchEntry
if (entry && typeof entry.type === 'undefined') {
entry.type = BatchType[BatchType.LEGACY]
}
return entry
}
public async getStateRootByIndex(index: number): Promise<StateRootEntry> {
@@ -169,7 +177,13 @@ export class TransportDB {
}
public async getLatestTransactionBatch(): Promise<TransactionBatchEntry> {
const entry = (await this._getLatestEntry(
TRANSPORT_DB_KEYS.TRANSACTION_BATCH
)) as TransactionBatchEntry
if (entry && typeof entry.type === 'undefined') {
entry.type = BatchType[BatchType.LEGACY]
}
return entry
}
public async getLatestStateRoot(): Promise<StateRootEntry> {
...
/* Imports: External */
import { BigNumber, ethers, constants } from 'ethers'
import { serialize, Transaction } from '@ethersproject/transactions'
import { getContractFactory } from '@eth-optimism/contracts'
import {
fromHexString,
toHexString,
toRpcHexString,
BatchType,
SequencerBatch,
} from '@eth-optimism/core-utils'
import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain'
@@ -76,33 +78,33 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
parseEvent: (event, extraData, l2ChainId) => {
const transactionEntries: TransactionEntry[] = []
// 12 * 2 + 2 = 26
if (extraData.l1TransactionData.length < 26) {
throw new Error(
`Block ${extraData.blockNumber} transaction data is too small: ${extraData.l1TransactionData.length}`
)
}
// TODO: typings not working?
const decoded = (SequencerBatch as any).fromHex(extraData.l1TransactionData)
// Keep track of the CTC index
let transactionIndex = 0
// Keep track of the number of deposits
let enqueuedCount = 0
// Keep track of the tx index in the current batch
let index = 0
for (const context of decoded.contexts) {
for (let j = 0; j < context.numSequencedTransactions; j++) {
const buf = decoded.transactions[index]
if (!buf) {
throw new Error(
`Invalid batch context, tx count: ${decoded.transactions.length}, attempting to parse ${index}`
)
}
const tx = buf.toTransaction()
transactionEntries.push({
index: extraData.prevTotalElements
@@ -114,16 +116,29 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: null,
data: serialize(
{
nonce: tx.nonce,
gasPrice: tx.gasPrice,
gasLimit: tx.gasLimit,
to: tx.to,
value: tx.value,
data: tx.data,
},
{
v: tx.v,
r: tx.r,
s: tx.s,
}
),
queueOrigin: 'sequencer',
value: toRpcHexString(tx.value),
queueIndex: null,
decoded: mapSequencerTransaction(tx, l2ChainId),
confirmed: true,
})
transactionIndex++
index++
}
for (let j = 0; j < context.numSubsequentQueueTransactions; j++) { for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
...@@ -169,6 +184,7 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet< ...@@ -169,6 +184,7 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
timestamp: BigNumber.from(extraData.timestamp).toNumber(), timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter, submitter: extraData.submitter,
l1TransactionHash: extraData.l1TransactionHash, l1TransactionHash: extraData.l1TransactionHash,
type: BatchType[decoded.type],
} }
return { return {
...@@ -206,61 +222,21 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet< ...@@ -206,61 +222,21 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
}, },
} }
interface SequencerBatchContext { const mapSequencerTransaction = (
numSequencedTransactions: number tx: Transaction,
numSubsequentQueueTransactions: number
timestamp: number
blockNumber: number
}
const parseSequencerBatchContext = (
calldata: Buffer,
offset: number
): SequencerBatchContext => {
return {
numSequencedTransactions: BigNumber.from(
calldata.slice(offset, offset + 3)
).toNumber(),
numSubsequentQueueTransactions: BigNumber.from(
calldata.slice(offset + 3, offset + 6)
).toNumber(),
timestamp: BigNumber.from(
calldata.slice(offset + 6, offset + 11)
).toNumber(),
blockNumber: BigNumber.from(
calldata.slice(offset + 11, offset + 16)
).toNumber(),
}
}
const parseSequencerBatchTransaction = (
calldata: Buffer,
offset: number
): Buffer => {
const transactionLength = BigNumber.from(
calldata.slice(offset, offset + 3)
).toNumber()
return calldata.slice(offset + 3, offset + 3 + transactionLength)
}
const decodeSequencerBatchTransaction = (
transaction: Buffer,
l2ChainId: number l2ChainId: number
): DecodedSequencerBatchTransaction => { ): DecodedSequencerBatchTransaction => {
const decodedTx = ethers.utils.parseTransaction(transaction)
return { return {
nonce: BigNumber.from(decodedTx.nonce).toString(), nonce: BigNumber.from(tx.nonce).toString(),
gasPrice: BigNumber.from(decodedTx.gasPrice).toString(), gasPrice: BigNumber.from(tx.gasPrice).toString(),
gasLimit: BigNumber.from(decodedTx.gasLimit).toString(), gasLimit: BigNumber.from(tx.gasLimit).toString(),
value: toRpcHexString(decodedTx.value), value: toRpcHexString(tx.value),
target: decodedTx.to ? toHexString(decodedTx.to) : null, target: tx.to ? toHexString(tx.to) : null,
data: toHexString(decodedTx.data), data: toHexString(tx.data),
sig: { sig: {
v: parseSignatureVParam(decodedTx.v, l2ChainId), v: parseSignatureVParam(tx.v, l2ChainId),
r: toHexString(decodedTx.r), r: toHexString(tx.r),
s: toHexString(decodedTx.s), s: toHexString(tx.s),
}, },
} }
} }
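
The handler diff above replaces the hand-rolled pointer arithmetic (`parseSequencerBatchContext` / `parseSequencerBatchTransaction`) with the `SequencerBatch` codec from `@eth-optimism/core-utils`, which understands both legacy and zlib-compressed batches. A minimal sketch of the new decoding path, assuming only the API surface shown in the diff (`fromHex`, `contexts`, `transactions`, `toTransaction`); the `inspectBatch` helper itself is illustrative:

```ts
import { SequencerBatch, BatchType } from '@eth-optimism/core-utils'

const inspectBatch = (l1TransactionData: string) => {
  // fromHex parses the typed batch header and, for zlib batches,
  // inflates the body. The cast mirrors the typings TODO in the diff.
  const batch = (SequencerBatch as any).fromHex(l1TransactionData)
  console.log('batch type:', BatchType[batch.type])

  let index = 0
  for (const context of batch.contexts) {
    for (let j = 0; j < context.numSequencedTransactions; j++) {
      // Each entry deserializes into an ethers Transaction, exactly as
      // parseEvent does above with buf.toTransaction().
      const tx = batch.transactions[index++].toTransaction()
      console.log(index, tx.nonce, tx.to, tx.value.toString())
    }
  }
}
```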
...@@ -60,6 +60,7 @@ export const handleEventsStateBatchAppended: EventHandlerSet< ...@@ -60,6 +60,7 @@ export const handleEventsStateBatchAppended: EventHandlerSet<
prevTotalElements: event.args._prevTotalElements.toNumber(), prevTotalElements: event.args._prevTotalElements.toNumber(),
extraData: event.args._extraData, extraData: event.args._extraData,
l1TransactionHash: extraData.l1TransactionHash, l1TransactionHash: extraData.l1TransactionHash,
type: 'LEGACY', // There is currently only 1 state root batch type
} }
return { return {
......
...@@ -48,6 +48,7 @@ interface BatchEntry { ...@@ -48,6 +48,7 @@ interface BatchEntry {
prevTotalElements: number prevTotalElements: number
extraData: string extraData: string
l1TransactionHash: string l1TransactionHash: string
type: string
} }
export type TransactionBatchEntry = BatchEntry export type TransactionBatchEntry = BatchEntry
......
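
The new `type` field on `BatchEntry` stores the string name of the batch type rather than its numeric value. `BatchType` is evidently a numeric enum in `core-utils`, so reverse-indexing it yields that name, which is how both handlers populate the field; a small sketch using the names from the diffs above:

```ts
import { BatchType } from '@eth-optimism/core-utils'

// Numeric enums in TypeScript support reverse lookup: indexing with a
// member value returns the member name as a string.
const legacy: string = BatchType[BatchType.LEGACY] // 'LEGACY'
const zlib: string = BatchType[BatchType.ZLIB] // 'ZLIB'
```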
/* External Imports */ import fs from 'fs'
import path from 'path'
import chai = require('chai') import chai = require('chai')
import Mocha from 'mocha' import Mocha from 'mocha'
import chaiAsPromised from 'chai-as-promised' import chaiAsPromised from 'chai-as-promised'
import { BigNumber } from 'ethers'
// Chai plugins go here. // Chai plugins go here.
chai.use(chaiAsPromised) chai.use(chaiAsPromised)
...@@ -9,4 +12,38 @@ chai.use(chaiAsPromised) ...@@ -9,4 +12,38 @@ chai.use(chaiAsPromised)
const should = chai.should() const should = chai.should()
const expect = chai.expect const expect = chai.expect
export { should, expect, Mocha } const readMockData = () => {
const mockDataPath = path.join(__dirname, 'unit-tests', 'examples')
const paths = fs.readdirSync(mockDataPath)
const files = []
for (const filename of paths) {
// Skip non .txt files
if (!filename.endsWith('.txt')) {
continue
}
const filePath = path.join(mockDataPath, filename)
const file = fs.readFileSync(filePath)
const obj = JSON.parse(file.toString())
// Reserialize the BigNumbers
obj.input.extraData.prevTotalElements = BigNumber.from(
obj.input.extraData.prevTotalElements
)
obj.input.extraData.batchIndex = BigNumber.from(
obj.input.extraData.batchIndex
)
if (obj.input.event.args.length !== 3) {
throw new Error(`ABI mismatch`)
}
obj.input.event.args = obj.input.event.args.map(BigNumber.from)
obj.input.event.args._startingQueueIndex = obj.input.event.args[0]
obj.input.event.args._numQueueElements = obj.input.event.args[1]
obj.input.event.args._totalElements = obj.input.event.args[2]
obj.input.extraData.batchSize = BigNumber.from(
obj.input.extraData.batchSize
)
files.push(obj)
}
return files
}
export { should, expect, Mocha, readMockData }
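
`readMockData` loads JSON fixtures (stored as `.txt` files under `unit-tests/examples`) and rehydrates the fields that were serialized `BigNumber`s. A hypothetical fixture shape, inferred only from the fields the helper and the tests below touch; real files may carry additional keys:

```ts
// Illustrative only: the shape readMockData appears to expect.
interface MockCase {
  input: {
    // Positional args are remapped onto the named CTC event fields:
    // [_startingQueueIndex, _numQueueElements, _totalElements]
    event: { args: string[] }
    extraData: {
      prevTotalElements: string // rehydrated to BigNumber
      batchIndex: string // rehydrated to BigNumber
      batchSize: string // rehydrated to BigNumber
      l1TransactionHash: string
      l1TransactionData: string
      // ...remaining SequencerBatchAppendedExtraData fields
    }
    l2ChainId: number
  }
  output: {
    transactionEntries: unknown[]
    transactionBatchEntry: { type: string } // plus the other batch fields
  }
}
```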
import { BigNumber, ethers } from 'ethers' import { BigNumber, ethers } from 'ethers'
import { sequencerBatch, add0x, BatchType } from '@eth-optimism/core-utils'
const compressBatchWithZlib = (calldata: string): string => {
const batch = sequencerBatch.decode(calldata)
batch.type = BatchType.ZLIB
const encoded = sequencerBatch.encode(batch)
return add0x(encoded)
}
/* Imports: Internal */ /* Imports: Internal */
import { expect } from '../../../../setup' import { expect, readMockData } from '../../../../setup'
import { handleEventsSequencerBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended' import { handleEventsSequencerBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended'
import { SequencerBatchAppendedExtraData } from '../../../../../src/types' import { SequencerBatchAppendedExtraData } from '../../../../../src/types'
describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () => { describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () => {
const mockData = readMockData()
describe('handleEventsSequencerBatchAppended.parseEvent', () => { describe('handleEventsSequencerBatchAppended.parseEvent', () => {
// This tests the behavior of parsing a real mainnet transaction, // This tests the behavior of parsing a real mainnet transaction,
// so it will break if the encoding scheme changes. // so it will break if the encoding scheme changes.
...@@ -46,9 +56,53 @@ describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () ...@@ -46,9 +56,53 @@ describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', ()
expect(() => { expect(() => {
handleEventsSequencerBatchAppended.parseEvent(...input1) handleEventsSequencerBatchAppended.parseEvent(...input1)
}).to.throw( }).to.throw(
`Block ${input1[1].blockNumber} transaction data is invalid for decoding: ${input1[1].l1TransactionData} , ` + `Block ${input1[1].blockNumber} transaction data is too small: ${input1[1].l1TransactionData.length}`
`converted buffer length is < 12.`
) )
}) })
describe('mainnet transactions', () => {
for (const mock of mockData) {
const { input, output } = mock
const { event, extraData, l2ChainId } = input
const hash = mock.input.extraData.l1TransactionHash
it(`uncompressed: ${hash}`, () => {
// Set the type to be legacy
output.transactionBatchEntry.type = BatchType[BatchType.LEGACY]
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
extraData,
l2ChainId
)
// Check all of the transaction entries individually
for (const [i, got] of res.transactionEntries.entries()) {
const expected = output.transactionEntries[i]
expect(got).to.deep.eq(expected, `case ${i}`)
}
expect(res).to.deep.eq(output)
})
it(`compressed: ${hash}`, () => {
// Set the type to be zlib
output.transactionBatchEntry.type = BatchType[BatchType.ZLIB]
const compressed = compressBatchWithZlib(
input.extraData.l1TransactionData
)
const copy = { ...extraData }
copy.l1TransactionData = compressed
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
copy,
l2ChainId
)
expect(res).to.deep.eq(output)
})
}
})
}) })
}) })
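
For reference, the compressed cases above amount to a round trip through the `sequencerBatch` codec: decode the legacy calldata, flip the type, re-encode, and expect `parseEvent` to produce identical output (modulo the batch `type` string). A sketch of that invariant, using only the codec calls already imported in the test; the `roundTrip` helper is illustrative:

```ts
import { sequencerBatch, add0x, BatchType } from '@eth-optimism/core-utils'

// Recompress legacy calldata and confirm the codec round-trips: the
// contexts and transactions must survive; only the type changes.
const roundTrip = (legacyCalldata: string) => {
  const batch = sequencerBatch.decode(legacyCalldata)
  batch.type = BatchType.ZLIB
  const compressed = add0x(sequencerBatch.encode(batch))
  const reDecoded = sequencerBatch.decode(compressed)
  return { compressed, reDecoded }
}
```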
{ {
"extends": "../../tsconfig.json" "extends": "../../tsconfig.json",
"typeRoots": ["node_modules/@types", "src/@types"]
} }
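
The tsconfig change adds `src/@types` as a type root so local ambient declarations are picked up alongside `node_modules/@types` — quite possibly for the newly added `bufio` dependency, if it lacks bundled typings. A hypothetical stub that this setting would make visible (the module name and shape are illustrative only):

```ts
// src/@types/bufio/index.d.ts -- illustrative stub; the real package's
// exports and any published typings may differ.
declare module 'bufio' {
  const bufio: any
  export default bufio
}
```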
...@@ -1080,20 +1080,13 @@ ...@@ -1080,20 +1080,13 @@
dependencies: dependencies:
"@ethersproject/logger" "^5.4.0" "@ethersproject/logger" "^5.4.0"
"@ethersproject/networks@5.5.2": "@ethersproject/networks@5.5.2", "@ethersproject/networks@^5.5.0":
version "5.5.2" version "5.5.2"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.2.tgz#784c8b1283cd2a931114ab428dae1bd00c07630b" resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.2.tgz#784c8b1283cd2a931114ab428dae1bd00c07630b"
integrity sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ== integrity sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ==
dependencies: dependencies:
"@ethersproject/logger" "^5.5.0" "@ethersproject/logger" "^5.5.0"
"@ethersproject/networks@^5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.0.tgz#babec47cab892c51f8dd652ce7f2e3e14283981a"
integrity sha512-KWfP3xOnJeF89Uf/FCJdV1a2aDJe5XTN2N52p4fcQ34QhDqQFkgQKZ39VGtiqUgHcLI8DfT0l9azC3KFTunqtA==
dependencies:
"@ethersproject/logger" "^5.5.0"
"@ethersproject/pbkdf2@5.4.0", "@ethersproject/pbkdf2@^5.0.0", "@ethersproject/pbkdf2@^5.4.0": "@ethersproject/pbkdf2@5.4.0", "@ethersproject/pbkdf2@^5.0.0", "@ethersproject/pbkdf2@^5.4.0":
version "5.4.0" version "5.4.0"
resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.4.0.tgz#ed88782a67fda1594c22d60d0ca911a9d669641c" resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.4.0.tgz#ed88782a67fda1594c22d60d0ca911a9d669641c"
...@@ -1423,7 +1416,7 @@ ...@@ -1423,7 +1416,7 @@
"@ethersproject/properties" "^5.4.0" "@ethersproject/properties" "^5.4.0"
"@ethersproject/strings" "^5.4.0" "@ethersproject/strings" "^5.4.0"
"@ethersproject/web@5.5.1", "@ethersproject/web@^5.5.1": "@ethersproject/web@5.5.1", "@ethersproject/web@^5.5.0", "@ethersproject/web@^5.5.1":
version "5.5.1" version "5.5.1"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.1.tgz#cfcc4a074a6936c657878ac58917a61341681316" resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.1.tgz#cfcc4a074a6936c657878ac58917a61341681316"
integrity sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg== integrity sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg==
...@@ -1434,17 +1427,6 @@ ...@@ -1434,17 +1427,6 @@
"@ethersproject/properties" "^5.5.0" "@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0" "@ethersproject/strings" "^5.5.0"
"@ethersproject/web@^5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.0.tgz#0e5bb21a2b58fb4960a705bfc6522a6acf461e28"
integrity sha512-BEgY0eL5oH4mAo37TNYVrFeHsIXLRxggCRG/ksRIxI2X5uj5IsjGmcNiRN/VirQOlBxcUhCgHhaDLG4m6XAVoA==
dependencies:
"@ethersproject/base64" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/wordlists@5.4.0", "@ethersproject/wordlists@^5.0.0", "@ethersproject/wordlists@^5.4.0": "@ethersproject/wordlists@5.4.0", "@ethersproject/wordlists@^5.0.0", "@ethersproject/wordlists@^5.4.0":
version "5.4.0" version "5.4.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.4.0.tgz#f34205ec3bbc9e2c49cadaee774cf0b07e7573d7" resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.4.0.tgz#f34205ec3bbc9e2c49cadaee774cf0b07e7573d7"
...@@ -4850,6 +4832,11 @@ bufferutil@^4.0.1: ...@@ -4850,6 +4832,11 @@ bufferutil@^4.0.1:
dependencies: dependencies:
node-gyp-build "^4.2.0" node-gyp-build "^4.2.0"
bufio@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/bufio/-/bufio-1.0.7.tgz#b7f63a1369a0829ed64cc14edf0573b3e382a33e"
integrity sha512-bd1dDQhiC+bEbEfg56IdBv7faWa6OipMs/AFFFvtFnB3wAYjlwQpQRZ0pm6ZkgtfL0pILRXhKxOiQj6UzoMR7A==
builtin-modules@^3.0.0: builtin-modules@^3.0.0:
version "3.2.0" version "3.2.0"
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887"
......