Commit da09a10d authored by Matthew Slipper's avatar Matthew Slipper Committed by GitHub

Merge pull request #2234 from ethereum-optimism/feat/zlib-compression

feat: typed batches and batch compression
parents d0853b12 b1697ad3
---
'@eth-optimism/integration-tests': patch
---
Add test coverage for zlib compressed batches
---
'@eth-optimism/data-transport-layer': patch
---
Enable typed batch support
---
'@eth-optimism/batch-submitter': patch
---
Update to allow for zlib compressed batches
---
'@eth-optimism/batch-submitter-service': patch
---
Enable the usage of typed batches and type 0 zlib compressed batches
---
'@eth-optimism/core-utils': patch
---
Update batch serialization with typed batches and zlib compression
......@@ -14,3 +14,4 @@ l2geth/signer/fourbyte
l2geth/cmd/puppeth
l2geth/cmd/clef
go/gas-oracle/gas-oracle
go/batch-submitter/batch-submitter
......@@ -20,7 +20,12 @@ jobs:
- 5000:5000
strategy:
matrix:
batch-submitter: [ts-batch-submitter, go-batch-submitter]
batch-submitter:
- ts-batch-submitter
- go-batch-submitter
batch-type:
- zlib
- legacy
env:
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
......@@ -40,6 +45,10 @@ jobs:
restore-keys: |
${{ runner.os }}-yarn-
- name: Set conditional env vars
run: |
echo "BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE=${{ matrix.batch-type }}" >> $GITHUB_ENV
- name: Bring the stack up
working-directory: ./ops
run: |
......
......@@ -125,6 +125,7 @@ func Main(gitVersion string) func(ctx *cli.Context) error {
CTCAddr: ctcAddress,
ChainID: chainID,
PrivKey: sequencerPrivKey,
BatchType: sequencer.BatchTypeFromString(cfg.SequencerBatchType),
})
if err != nil {
return err
......
......@@ -33,6 +33,11 @@ var (
ErrSameSequencerAndProposerPrivKey = errors.New("sequencer-priv-key and " +
"proposer-priv-key must be distinct")
// ErrInvalidBatchType signals that an unsupported batch type is being
// configured. The default is "legacy" and the options are "legacy" or
// "zlib"
ErrInvalidBatchType = errors.New("invalid batch type")
// ErrSentryDSNNotSet signals that not Data Source Name was provided
// with which to configure Sentry logging.
ErrSentryDSNNotSet = errors.New("sentry-dsn must be set if use-sentry " +
......@@ -164,6 +169,9 @@ type Config struct {
// the proposer transactions.
ProposerHDPath string
// SequencerBatchType represents the type of batch the sequencer submits.
SequencerBatchType string
// MetricsServerEnable if true, will create a metrics client and log to
// Prometheus.
MetricsServerEnable bool
......@@ -212,6 +220,7 @@ func NewConfig(ctx *cli.Context) (Config, error) {
Mnemonic: ctx.GlobalString(flags.MnemonicFlag.Name),
SequencerHDPath: ctx.GlobalString(flags.SequencerHDPathFlag.Name),
ProposerHDPath: ctx.GlobalString(flags.ProposerHDPathFlag.Name),
SequencerBatchType: ctx.GlobalString(flags.SequencerBatchType.Name),
MetricsServerEnable: ctx.GlobalBool(flags.MetricsServerEnableFlag.Name),
MetricsHostname: ctx.GlobalString(flags.MetricsHostnameFlag.Name),
MetricsPort: ctx.GlobalUint64(flags.MetricsPortFlag.Name),
......@@ -265,6 +274,12 @@ func ValidateConfig(cfg *Config) error {
return ErrSameSequencerAndProposerPrivKey
}
usingTypedBatches := cfg.SequencerBatchType != ""
validBatchType := cfg.SequencerBatchType == "legacy" || cfg.SequencerBatchType == "zlib"
if usingTypedBatches && !validBatchType {
return ErrInvalidBatchType
}
// Ensure the Sentry Data Source Name is set when using Sentry.
if cfg.SentryEnable && cfg.SentryDsn == "" {
return ErrSentryDSNNotSet
......
......@@ -78,6 +78,7 @@ func GenSequencerBatchParams(
shouldStartAtElement uint64,
blockOffset uint64,
batch []BatchElement,
batchType BatchType,
) (*AppendSequencerBatchParams, error) {
var (
......@@ -188,5 +189,6 @@ func GenSequencerBatchParams(
TotalElementsToAppend: uint64(len(batch)),
Contexts: contexts,
Txs: txs,
Type: batchType,
}, nil
}
......@@ -36,6 +36,7 @@ type Config struct {
CTCAddr common.Address
ChainID *big.Int
PrivKey *ecdsa.PrivateKey
BatchType BatchType
}
type Driver struct {
......@@ -160,7 +161,7 @@ func (d *Driver) CraftBatchTx(
name := d.cfg.Name
log.Info(name+" crafting batch tx", "start", start, "end", end,
"nonce", nonce)
"nonce", nonce, "type", d.cfg.BatchType.String())
var (
batchElements []BatchElement
......@@ -195,7 +196,7 @@ func (d *Driver) CraftBatchTx(
var pruneCount int
for {
batchParams, err := GenSequencerBatchParams(
shouldStartAt, d.cfg.BlockOffset, batchElements,
shouldStartAt, d.cfg.BlockOffset, batchElements, d.cfg.BatchType,
)
if err != nil {
return nil, err
......
package sequencer
import (
"bufio"
"bytes"
"compress/zlib"
"encoding/binary"
"errors"
"fmt"
"io"
"math"
......@@ -17,7 +20,13 @@ const (
TxLenSize = 3
)
var (
	// byteOrder is the endianness used for batch serialization.
	byteOrder = binary.BigEndian

	// ErrMalformedBatch represents a batch that is not well formed
	// according to the protocol specification.
	ErrMalformedBatch = errors.New("malformed batch")
)
// BatchContext denotes a range of transactions that belong to the same batch. It
// is used to compress shared fields that would otherwise be repeated for each
......@@ -44,11 +53,14 @@ type BatchContext struct {
// - num_subsequent_queue_txs: 3 bytes
// - timestamp: 5 bytes
// - block_number: 5 bytes
//
// Note that writing to a bytes.Buffer cannot
// error, so errors are ignored here
func (c *BatchContext) Write(w *bytes.Buffer) {
	// Errors are deliberately ignored: writing to a bytes.Buffer
	// cannot fail.
	_ = writeUint64(w, c.NumSequencedTxs, 3)
	_ = writeUint64(w, c.NumSubsequentQueueTxs, 3)
	_ = writeUint64(w, c.Timestamp, 5)
	_ = writeUint64(w, c.BlockNumber, 5)
}
// Read decodes the BatchContext from the passed reader. If fewer than 16-bytes
......@@ -71,6 +83,45 @@ func (c *BatchContext) Read(r io.Reader) error {
return readUint64(r, &c.BlockNumber, 5)
}
// BatchType enumerates the supported batch serialization formats.
// When the first context in a batch has a timestamp of 0, its block
// number field is interpreted as a BatchType value.
type BatchType int8

const (
	// BatchTypeLegacy represents the legacy batch type.
	BatchTypeLegacy BatchType = -1

	// BatchTypeZlib represents a batch type where the transaction
	// data is compressed using zlib.
	BatchTypeZlib BatchType = 0
)

// String returns a human readable representation of the BatchType,
// implementing fmt.Stringer. Unknown values map to the empty string.
func (b BatchType) String() string {
	if b == BatchTypeZlib {
		return "ZLIB"
	}
	if b == BatchTypeLegacy {
		return "LEGACY"
	}
	return ""
}

// BatchTypeFromString parses a human readable batch type name.
// Anything other than "zlib"/"ZLIB" — including unrecognized
// strings — resolves to BatchTypeLegacy.
func BatchTypeFromString(s string) BatchType {
	if s == "zlib" || s == "ZLIB" {
		return BatchTypeZlib
	}
	return BatchTypeLegacy
}
// AppendSequencerBatchParams holds the raw data required to submit a batch of
// L2 txs to L1 CTC contract. Rather than encoding the objects using the
// standard ABI encoding, a custom encoding is used and provided in the call data to
......@@ -95,6 +146,9 @@ type AppendSequencerBatchParams struct {
// Txs contains all sequencer txs that will be recorded in the L1 CTC
// contract.
Txs []*CachedTx
// The type of the batch
Type BatchType
}
// Write encodes the AppendSequencerBatchParams using the following format:
......@@ -105,21 +159,74 @@ type AppendSequencerBatchParams struct {
// - [num txs omitted]
// - tx_len: 3 bytes
// - tx_bytes: tx_len bytes
//
// Typed batches include a dummy context as the first context
// where the timestamp is 0. The blocknumber is interpreted
// as an enum that defines the type. It is impossible to have
// a timestamp of 0 in practice, so this safely can indicate
// that the batch is typed.
// Type 0 batches have a dummy context where the blocknumber is
// set to 0. The transaction data is compressed with zlib before
// submitting the transaction to the chain. The fields should_start_at_element,
// total_elements_to_append, num_contexts and the contexts themselves
// are not altered.
//
// Note that writing to a bytes.Buffer cannot
// error, so errors are ignored here
func (p *AppendSequencerBatchParams) Write(w *bytes.Buffer) error {
writeUint64(w, p.ShouldStartAtElement, 5)
writeUint64(w, p.TotalElementsToAppend, 3)
_ = writeUint64(w, p.ShouldStartAtElement, 5)
_ = writeUint64(w, p.TotalElementsToAppend, 3)
// There must be contexts if there are transactions
if len(p.Contexts) == 0 && len(p.Txs) != 0 {
return ErrMalformedBatch
}
// There must be transactions if there are contexts
if len(p.Txs) == 0 && len(p.Contexts) != 0 {
return ErrMalformedBatch
}
// copy the contexts as to not malleate the struct
// when it is a typed batch
contexts := make([]BatchContext, 0, len(p.Contexts)+1)
if p.Type == BatchTypeZlib {
// All zero values for the single batch context
// is desired here as blocknumber 0 means it is a zlib batch
contexts = append(contexts, BatchContext{})
}
contexts = append(contexts, p.Contexts...)
// Write number of contexts followed by each fixed-size BatchContext.
writeUint64(w, uint64(len(p.Contexts)), 3)
for _, context := range p.Contexts {
_ = writeUint64(w, uint64(len(contexts)), 3)
for _, context := range contexts {
context.Write(w)
}
switch p.Type {
case BatchTypeLegacy:
// Write each length-prefixed tx.
for _, tx := range p.Txs {
writeUint64(w, uint64(tx.Size()), TxLenSize)
_ = writeUint64(w, uint64(tx.Size()), TxLenSize)
_, _ = w.Write(tx.RawTx()) // can't fail for bytes.Buffer
}
case BatchTypeZlib:
zw := zlib.NewWriter(w)
for _, tx := range p.Txs {
if err := writeUint64(zw, uint64(tx.Size()), TxLenSize); err != nil {
return err
}
if _, err := zw.Write(tx.RawTx()); err != nil {
return err
}
}
if err := zw.Close(); err != nil {
return err
}
default:
return fmt.Errorf("Unknown batch type: %s", p.Type)
}
return nil
}
......@@ -159,6 +266,8 @@ func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
return err
}
// Ensure that contexts is never nil
p.Contexts = make([]BatchContext, 0)
for i := uint64(0); i < numContexts; i++ {
var batchContext BatchContext
if err := batchContext.Read(r); err != nil {
......@@ -168,14 +277,44 @@ func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
p.Contexts = append(p.Contexts, batchContext)
}
// Assume that it is a legacy batch at first
p.Type = BatchTypeLegacy
// Handle backwards compatible batch types
if len(p.Contexts) > 0 && p.Contexts[0].Timestamp == 0 {
switch p.Contexts[0].BlockNumber {
case 0:
// zlib compressed transaction data
p.Type = BatchTypeZlib
// remove the first dummy context
p.Contexts = p.Contexts[1:]
numContexts--
zr, err := zlib.NewReader(r)
if err != nil {
return err
}
defer zr.Close()
r = bufio.NewReader(zr)
}
}
// Deserialize any transactions. Since the number of txs is omitted
// from the encoding, loop until the stream is consumed.
for {
var txLen uint64
err := readUint64(r, &txLen, TxLenSize)
// Getting an EOF when reading the txLen expected for a cleanly
// encoded object. Silece the error and return success.
// encoded object. Silence the error and return success if
// the batch is well formed.
if err == io.EOF {
if len(p.Contexts) == 0 && len(p.Txs) != 0 {
return ErrMalformedBatch
}
if len(p.Txs) == 0 && len(p.Contexts) != 0 {
return ErrMalformedBatch
}
return nil
} else if err != nil {
return err
......@@ -188,10 +327,11 @@ func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
p.Txs = append(p.Txs, NewCachedTx(tx))
}
}
// writeUint64 writes the bottom `n` bytes of `val` to `w`.
func writeUint64(w *bytes.Buffer, val uint64, n uint) {
func writeUint64(w io.Writer, val uint64, n uint) error {
if n < 1 || n > 8 {
panic(fmt.Sprintf("invalid number of bytes %d must be 1-8", n))
}
......@@ -204,7 +344,8 @@ func writeUint64(w *bytes.Buffer, val uint64, n uint) {
var buf [8]byte
byteOrder.PutUint64(buf[:], val)
_, _ = w.Write(buf[8-n:]) // can't fail for bytes.Buffer
_, err := w.Write(buf[8-n:])
return err
}
// readUint64 reads `n` bytes from `r` and returns them in the lower `n` bytes
......
......@@ -65,167 +65,21 @@ type AppendSequencerBatchParamsTest struct {
TotalElementsToAppend uint64 `json:"total_elements_to_append"`
Contexts []sequencer.BatchContext `json:"contexts"`
Txs []string `json:"txs"`
Error bool `json:"error"`
}
var appendSequencerBatchParamTests = AppendSequencerBatchParamsTestCases{
Tests: []AppendSequencerBatchParamsTest{
{
Name: "empty batch",
HexEncoding: "0000000000000000" +
"000000",
ShouldStartAtElement: 0,
TotalElementsToAppend: 0,
Contexts: nil,
Txs: nil,
},
{
Name: "single tx",
HexEncoding: "0000000001000001" +
"000000" +
"00000ac9808080808080808080",
ShouldStartAtElement: 1,
TotalElementsToAppend: 1,
Contexts: nil,
Txs: []string{
"c9808080808080808080",
},
},
{
Name: "multiple txs",
HexEncoding: "0000000001000004" +
"000000" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080",
ShouldStartAtElement: 1,
TotalElementsToAppend: 4,
Contexts: nil,
Txs: []string{
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
},
},
{
Name: "single context",
HexEncoding: "0000000001000000" +
"000001" +
"000102030405060708090a0b0c0d0e0f",
ShouldStartAtElement: 1,
TotalElementsToAppend: 0,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: nil,
},
{
Name: "multiple contexts",
HexEncoding: "0000000001000000" +
"000004" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f",
ShouldStartAtElement: 1,
TotalElementsToAppend: 0,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: nil,
},
{
Name: "complex",
HexEncoding: "0102030405060708" +
"000004" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080",
ShouldStartAtElement: 0x0102030405,
TotalElementsToAppend: 0x060708,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: []string{
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
},
},
},
}
// TestAppendSequencerBatchParamsEncodeDecodeMatchesJSON ensures that the
// in-memory test vectors for valid encode/decode stay in sync with the JSON
// version.
func TestAppendSequencerBatchParamsEncodeDecodeMatchesJSON(t *testing.T) {
t.Parallel()
jsonBytes, err := json.MarshalIndent(appendSequencerBatchParamTests, "", "\t")
require.Nil(t, err)
var appendSequencerBatchParamTests = AppendSequencerBatchParamsTestCases{}
func init() {
data, err := os.ReadFile("./testdata/valid_append_sequencer_batch_params.json")
require.Nil(t, err)
if err != nil {
panic(err)
}
require.Equal(t, jsonBytes, data)
err = json.Unmarshal(data, &appendSequencerBatchParamTests)
if err != nil {
panic(err)
}
}
// TestAppendSequencerBatchParamsEncodeDecode asserts the proper encoding and
......@@ -265,6 +119,7 @@ func testAppendSequencerBatchParamsEncodeDecode(
TotalElementsToAppend: test.TotalElementsToAppend,
Contexts: test.Contexts,
Txs: nil,
Type: sequencer.BatchTypeLegacy,
}
// Decode the batch from the test string.
......@@ -273,7 +128,12 @@ func testAppendSequencerBatchParamsEncodeDecode(
var params sequencer.AppendSequencerBatchParams
err = params.Read(bytes.NewReader(rawBytes))
if test.Error {
require.ErrorIs(t, err, sequencer.ErrMalformedBatch)
} else {
require.Nil(t, err)
}
require.Equal(t, params.Type, sequencer.BatchTypeLegacy)
// Assert that the decoded params match the expected params. The
// transactions are compared separately (via hash), since the internal
......@@ -290,8 +150,34 @@ func testAppendSequencerBatchParamsEncodeDecode(
// Finally, encode the decoded object and assert it matches the original
// hex string.
paramsBytes, err := params.Serialize()
// Return early when testing error cases, no need to reserialize again
if test.Error {
require.ErrorIs(t, err, sequencer.ErrMalformedBatch)
return
}
require.Nil(t, err)
require.Equal(t, test.HexEncoding, hex.EncodeToString(paramsBytes))
// Serialize the batches in compressed form
params.Type = sequencer.BatchTypeZlib
compressedParamsBytes, err := params.Serialize()
require.Nil(t, err)
// Deserialize the compressed batch
var paramsCompressed sequencer.AppendSequencerBatchParams
err = paramsCompressed.Read(bytes.NewReader(compressedParamsBytes))
require.Nil(t, err)
require.Equal(t, paramsCompressed.Type, sequencer.BatchTypeZlib)
expParams.Type = sequencer.BatchTypeZlib
decompressedTxs := paramsCompressed.Txs
paramsCompressed.Txs = nil
require.Equal(t, expParams, paramsCompressed)
compareTxs(t, expTxs, decompressedTxs)
paramsCompressed.Txs = decompressedTxs
}
// compareTxs compares a list of two transactions, testing each pair by tx hash.
......
......@@ -194,6 +194,12 @@ var (
"mnemonic. The mnemonic flag must also be set.",
EnvVar: prefixEnvVar("PROPOSER_HD_PATH"),
}
SequencerBatchType = cli.StringFlag{
Name: "sequencer-batch-type",
Usage: "The type of sequencer batch to be submitted. Valid arguments are legacy or zlib.",
Value: "legacy",
EnvVar: prefixEnvVar("SEQUENCER_BATCH_TYPE"),
}
MetricsServerEnableFlag = cli.BoolFlag{
Name: "metrics-server-enable",
Usage: "Whether or not to run the embedded metrics server",
......@@ -245,6 +251,7 @@ var optionalFlags = []cli.Flag{
SentryDsnFlag,
SentryTraceRateFlag,
BlockOffsetFlag,
SequencerBatchType,
SequencerPrivateKeyFlag,
ProposerPrivateKeyFlag,
MnemonicFlag,
......
......@@ -108,6 +108,9 @@ const procEnv = cleanEnv(process.env, {
MOCHA_BAIL: bool({
default: false,
}),
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: str({
default: 'zlib',
}),
})
export const envConfig = procEnv
......
import { SequencerBatch, BatchType } from '@eth-optimism/core-utils'

import { expect } from './shared/setup'
import { OptimismEnv } from './shared/env'
import { envConfig } from './shared/utils'

describe('Batch Serialization', () => {
  let env: OptimismEnv
  // Allow for each type to be tested. The env var here must be
  // the same value that is passed to the batch submitter
  const batchType = envConfig.BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE.toUpperCase()

  before(async () => {
    env = await OptimismEnv.new()
  })

  it('should fetch batches', async () => {
    const tip = await env.l1Provider.getBlockNumber()
    const logs = await env.ctc.queryFilter(
      env.ctc.filters.TransactionBatchAppended(),
      0,
      tip
    )

    // Collect the raw calldata of every batch submission
    const calldatas = []
    for (const log of logs) {
      const tx = await env.l1Provider.getTransaction(log.transactionHash)
      calldatas.push(tx.data)
    }
    expect(calldatas.length).to.be.gt(0, 'Submit some batches first')

    // Decode each batch and check every contained transaction. Fetching
    // them by hash and asserting strictly increasing block numbers lets
    // us skip handling deposits here.
    let lastBlockNumber = 0
    for (const calldata of calldatas) {
      // Typings don't work?
      const decoded = (SequencerBatch as any).fromHex(calldata)
      expect(decoded.type).to.eq(BatchType[batchType])

      for (const transaction of decoded.transactions) {
        const tx = transaction.toTransaction()
        const got = await env.l2Provider.getTransaction(tx.hash)
        expect(got).to.not.eq(null)
        expect(got.blockNumber).to.be.gt(lastBlockNumber)
        lastBlockNumber = got.blockNumber
      }
    }
  })
})
......@@ -14,5 +14,6 @@ services:
L1_ETH_RPC: http://l1_chain:8545
L2_ETH_RPC: http://l2geth:8545
URL: http://deployer:8081/addresses.json
BATCH_SUBMITTER_SEQUENCER_PRIVATE_KEY: "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
BATCH_SUBMITTER_PROPOSER_PRIVATE_KEY: "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a"
BATCH_SUBMITTER_SEQUENCER_PRIVATE_KEY: '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d'
BATCH_SUBMITTER_PROPOSER_PRIVATE_KEY: '0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a'
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
......@@ -15,5 +15,6 @@ services:
L1_NODE_WEB3_URL: http://l1_chain:8545
L2_NODE_WEB3_URL: http://l2geth:8545
URL: http://deployer:8081/addresses.json
SEQUENCER_PRIVATE_KEY: "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
PROPOSER_PRIVATE_KEY: "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a"
SEQUENCER_PRIVATE_KEY: '0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d'
PROPOSER_PRIVATE_KEY: '0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a'
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
version: "3.4"
version: '3.4'
services:
# this is a helper service used because there's no official hardhat image
......@@ -24,11 +24,11 @@ services:
environment:
# Env vars for the deployment script.
CONTRACTS_RPC_URL: http://l1_chain:8545
CONTRACTS_DEPLOYER_KEY: "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
CONTRACTS_TARGET_NETWORK: "custom"
OVM_ADDRESS_MANAGER_OWNER: "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266"
OVM_PROPOSER_ADDRESS: "0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc"
OVM_SEQUENCER_ADDRESS: "0x70997970c51812dc3a010c7d01b50e0d17dc79c8"
CONTRACTS_DEPLOYER_KEY: 'ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80'
CONTRACTS_TARGET_NETWORK: 'custom'
OVM_ADDRESS_MANAGER_OWNER: '0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266'
OVM_PROPOSER_ADDRESS: '0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc'
OVM_SEQUENCER_ADDRESS: '0x70997970c51812dc3a010c7d01b50e0d17dc79c8'
SCC_FRAUD_PROOF_WINDOW: 0
NUM_DEPLOY_CONFIRMATIONS: 0
# skip compilation when run in docker-compose, since the contracts
......@@ -37,13 +37,13 @@ services:
# Env vars for the dump script.
# Default hardhat account 5
GAS_PRICE_ORACLE_OWNER: "0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc"
GAS_PRICE_ORACLE_OWNER: '0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc'
# setting the whitelist owner to address(0) disables the whitelist
WHITELIST_OWNER: "0x0000000000000000000000000000000000000000"
L1_FEE_WALLET_ADDRESS: "0x391716d440c151c42cdf1c95c1d83a5427bca52c"
WHITELIST_OWNER: '0x0000000000000000000000000000000000000000'
L1_FEE_WALLET_ADDRESS: '0x391716d440c151c42cdf1c95c1d83a5427bca52c'
L2_CHAIN_ID: 420
L2_BLOCK_GAS_LIMIT: 15000000
BLOCK_SIGNER_ADDRESS: "0x00000398232E2064F896018496b4b44b3D62751F"
BLOCK_SIGNER_ADDRESS: '0x00000398232E2064F896018496b4b44b3D62751F'
GAS_PRICE_ORACLE_OVERHEAD: 2750
GAS_PRICE_ORACLE_SCALAR: 1500000
GAS_PRICE_ORACLE_L1_BASE_FEE: 1
......@@ -99,8 +99,8 @@ services:
ETH1_CTC_DEPLOYMENT_HEIGHT: 8
RETRIES: 60
# no need to keep this secret, only used internally to sign blocks
BLOCK_SIGNER_KEY: "6587ae678cf4fc9a33000cdbf9f35226b71dcc6a4684a31203241f9bcfd55d27"
BLOCK_SIGNER_ADDRESS: "0x00000398232E2064F896018496b4b44b3D62751F"
BLOCK_SIGNER_KEY: '6587ae678cf4fc9a33000cdbf9f35226b71dcc6a4684a31203241f9bcfd55d27'
BLOCK_SIGNER_ADDRESS: '0x00000398232E2064F896018496b4b44b3D62751F'
ROLLUP_ENFORCE_FEES: ${ROLLUP_ENFORCE_FEES:-true}
ROLLUP_FEE_THRESHOLD_DOWN: 0.9
......@@ -126,7 +126,7 @@ services:
L2_NODE_WEB3_URL: http://l2geth:8545
URL: http://deployer:8081/addresses.json
# a funded hardhat account
L1_WALLET_KEY: "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97"
L1_WALLET_KEY: '0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97'
RETRIES: 60
POLLING_INTERVAL: 500
GET_LOGS_INTERVAL: 500
......@@ -199,6 +199,7 @@ services:
URL: http://deployer:8081/addresses.json
ENABLE_GAS_REPORT: 1
NO_NETWORK: 1
BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE: ${BATCH_SUBMITTER_SEQUENCER_BATCH_TYPE:-zlib}
gas_oracle:
deploy:
......@@ -210,4 +211,4 @@ services:
environment:
GAS_PRICE_ORACLE_ETHEREUM_HTTP_URL: http://l2geth:8545
# Default hardhat account 5
GAS_PRICE_ORACLE_PRIVATE_KEY: "0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba"
GAS_PRICE_ORACLE_PRIVATE_KEY: '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba'
......@@ -12,6 +12,7 @@ import {
BatchElement,
Batch,
QueueOrigin,
BatchType,
} from '@eth-optimism/core-utils'
import { Logger, Metrics } from '@eth-optimism/common-ts'
......@@ -39,6 +40,7 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
private validateBatch: boolean
private transactionSubmitter: TransactionSubmitter
private gasThresholdInGwei: number
private batchType: BatchType
constructor(
signer: Signer,
......@@ -61,7 +63,8 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
fixDoublePlayedDeposits: false,
fixMonotonicity: false,
fixSkippedDeposits: false,
} // TODO: Remove this
}, // TODO: Remove this
batchType: string
) {
super(
signer,
......@@ -84,9 +87,18 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
this.gasThresholdInGwei = gasThresholdInGwei
this.transactionSubmitter = transactionSubmitter
this.logger.info('Batch validation options', {
if (batchType === 'legacy') {
this.batchType = BatchType.LEGACY
} else if (batchType === 'zlib') {
this.batchType = BatchType.ZLIB
} else {
throw new Error(`Invalid batch type: ${batchType}`)
}
this.logger.info('Batch options', {
autoFixBatchOptions,
validateBatch,
batchType: BatchType[this.batchType],
})
}
......@@ -295,6 +307,7 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
startBlock,
batch
)
let wasBatchTruncated = false
let encoded = encodeAppendSequencerBatch(sequencerBatchParams)
while (encoded.length / 2 > this.maxTxSize) {
......@@ -313,10 +326,14 @@ export class TransactionBatchSubmitter extends BatchSubmitter {
wasBatchTruncated = true
}
// Set the batch type so that it is serialized correctly
sequencerBatchParams.type = this.batchType
this.logger.info('Generated sequencer batch params', {
contexts: sequencerBatchParams.contexts,
transactions: sequencerBatchParams.transactions,
wasBatchTruncated,
type: BatchType[sequencerBatchParams.type],
})
return [sequencerBatchParams, wasBatchTruncated]
}
......
......@@ -250,6 +250,11 @@ export const run = async () => {
env.VALIDATE_TX_BATCH ? env.VALIDATE_TX_BATCH === 'true' : false
)
const SEQUENCER_BATCH_TYPE = config.str(
'sequencer-batch-type',
env.SEQUENCER_BATCH_TYPE || 'legacy'
)
// Auto fix batch options -- TODO: Remove this very hacky config
const AUTO_FIX_BATCH_OPTIONS_CONF = config.str(
'auto-fix-batch-conf',
......@@ -402,7 +407,8 @@ export const run = async () => {
VALIDATE_TX_BATCH,
logger.child({ name: TX_BATCH_SUBMITTER_LOG_TAG }),
metrics,
autoFixBatchOptions
autoFixBatchOptions,
SEQUENCER_BATCH_TYPE
)
const stateBatchTxSubmitter: TransactionSubmitter =
......
......@@ -4,12 +4,11 @@ import {
TransactionResponse,
TransactionRequest,
} from '@ethersproject/abstract-provider'
import { keccak256 } from 'ethers/lib/utils'
import {
AppendSequencerBatchParams,
BatchContext,
encodeAppendSequencerBatch,
remove0x,
sequencerBatch,
} from '@eth-optimism/core-utils'
export { encodeAppendSequencerBatch, BatchContext, AppendSequencerBatchParams }
......@@ -52,10 +51,6 @@ export class CanonicalTransactionChainContract extends Contract {
* Internal Functions *
*********************/
const APPEND_SEQUENCER_BATCH_METHOD_ID = keccak256(
Buffer.from('appendSequencerBatch()')
).slice(2, 10)
const appendSequencerBatch = async (
OVM_CanonicalTransactionChain: Contract,
batch: AppendSequencerBatchParams,
......@@ -68,8 +63,6 @@ const appendSequencerBatch = async (
})
}
const getEncodedCalldata = (batch: AppendSequencerBatchParams): string => {
const methodId = APPEND_SEQUENCER_BATCH_METHOD_ID
const calldata = encodeAppendSequencerBatch(batch)
return '0x' + remove0x(methodId) + remove0x(calldata)
const getEncodedCalldata = (params: AppendSequencerBatchParams): string => {
return sequencerBatch.encode(params)
}
......@@ -226,7 +226,13 @@ describe('BatchSubmitter', () => {
1,
false,
new Logger({ name: TX_BATCH_SUBMITTER_LOG_TAG }),
testMetrics
testMetrics,
{
fixDoublePlayedDeposits: false,
fixMonotonicity: false,
fixSkippedDeposits: false,
},
'legacy'
)
}
......
......@@ -49,7 +49,7 @@ task('set-l2-gasprice')
const GasPriceOracle = new ethers.Contract(
predeploys.OVM_GasPriceOracle,
GasPriceOracleArtifact.abi,
provider
signer
)
const addr = await signer.getAddress()
......
......@@ -35,7 +35,9 @@
"@ethersproject/abstract-provider": "^5.5.1",
"@ethersproject/bytes": "^5.5.0",
"@ethersproject/providers": "^5.5.3",
"@ethersproject/transactions": "^5.5.0",
"@ethersproject/web": "^5.5.1",
"bufio": "^1.0.7",
"chai": "^4.3.4",
"ethers": "^5.5.4"
},
......
// Hand-written ambient type declarations for the `bufio` module.
declare module 'bufio' {
  // BufferWriter accumulates sequential binary writes and renders
  // them into a single Buffer.
  class BufferWriter {
    // Current write cursor position, in bytes.
    public offset: number
    constructor()
    // Returns the serialized Buffer.
    render(): Buffer
    getSize(): number
    // Moves the write cursor to `offset`.
    seek(offset: number): this
    destroy(): this
    // Fixed-width unsigned integer writers (8 through 64 bits); the
    // `BE` variants are the big-endian forms.
    writeU8(n: number): this
    writeU16(n: number): this
    writeU16BE(n: number): this
    writeU24(n: number): this
    writeU24BE(n: number): this
    writeU32(n: number): this
    writeU32BE(n: number): this
    writeU40(n: number): this
    writeU40BE(n: number): this
    writeU48(n: number): this
    writeU48BE(n: number): this
    writeU56(n: number): this
    writeU56BE(n: number): this
    writeU64(n: number): this
    writeU64BE(n: number): this
    // Appends raw bytes.
    writeBytes(b: Buffer): this
    copy(value: number, start: number, end: number): this
  }

  // BufferReader consumes a Buffer through sequential fixed-width
  // reads, mirroring BufferWriter.
  class BufferReader {
    constructor(data: Buffer, copy?: boolean)
    getSize(): number
    // NOTE(review): presumably asserts that at least `n` unread bytes
    // remain — confirm against the bufio library documentation.
    check(n: number): void
    // Number of unread bytes remaining.
    left(): number
    seek(offset: number): this
    start(): number
    end(): number
    destroy(): this
    // Fixed-width unsigned integer readers; `BE` variants are the
    // big-endian forms.
    readU8(): number
    readU16(): number
    readU16BE(): number
    readU24(): number
    readU24BE(): number
    readU32(): number
    readU32BE(): number
    readU40(): number
    readU40BE(): number
    readU48(): number
    readU48BE(): number
    readU56(): number
    readU56BE(): number
    readU64(): number
    readU64BE(): number
    readBytes(size: number, copy?: boolean): Buffer
  }

  // Struct is the base class for objects with a custom binary
  // serialization; subclasses override write/read and inherit the
  // encode/decode and hex helpers.
  class Struct {
    constructor()
    encode(extra?: object): Buffer
    decode<T extends Struct>(data: Buffer, extra?: object): T
    getSize(extra?: object): number
    fromHex(s: string, extra?: object): this
    toHex(): string
    write(bw: BufferWriter, extra?: object): BufferWriter
    read(br: BufferReader, extra?: object): this
    static read<T extends Struct>(br: BufferReader, extra?: object): T
    static decode<T extends Struct>(data: Buffer, extra?: object): T
    static fromHex<T extends Struct>(s: string, extra?: object): T
  }
}
......@@ -7,23 +7,31 @@ import {
encodeAppendSequencerBatch,
decodeAppendSequencerBatch,
sequencerBatch,
BatchType,
SequencerBatch,
} from '../src'
describe('BatchEncoder', () => {
describe('BatchEncoder', function () {
this.timeout(10_000)
// eslint-disable-next-line @typescript-eslint/no-var-requires
const data = require('./fixtures/calldata.json')
describe('appendSequencerBatch', () => {
it('should work with the simple case', () => {
it('legacy: should work with the simple case', () => {
const batch = {
shouldStartAtElement: 0,
totalElementsToAppend: 0,
contexts: [],
transactions: [],
type: BatchType.LEGACY,
}
const encoded = encodeAppendSequencerBatch(batch)
const decoded = decodeAppendSequencerBatch(encoded)
expect(decoded).to.deep.equal(batch)
})
it('should work with more complex case', () => {
it('legacy: should work with more complex case', () => {
const batch = {
shouldStartAtElement: 10,
totalElementsToAppend: 1,
......@@ -36,19 +44,57 @@ describe('BatchEncoder', () => {
},
],
transactions: ['0x45423400000011', '0x45423400000012'],
type: BatchType.LEGACY,
}
const encoded = encodeAppendSequencerBatch(batch)
const decoded = decodeAppendSequencerBatch(encoded)
expect(decoded).to.deep.equal(batch)
})
it('should work with mainnet calldata', () => {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const data = require('./fixtures/appendSequencerBatch.json')
for (const calldata of data.calldata) {
const decoded = sequencerBatch.decode(calldata)
const encoded = sequencerBatch.encode(decoded)
expect(encoded).to.equal(calldata)
// Round-trips real mainnet calldata fixtures through SequencerBatch in both
// legacy and zlib form, asserting that serialization is lossless and that the
// batch type byte is detected on decode.
describe('mainnet data', () => {
for (const [hash, calldata] of Object.entries(data)) {
// Deserialize the raw calldata
const decoded = SequencerBatch.fromHex<SequencerBatch>(
calldata as string
)
it(`${hash}`, () => {
// Legacy round trip: hex -> batch -> hex must reproduce the input exactly.
const encoded = decoded.toHex()
expect(encoded).to.deep.equal(calldata)
const batch = SequencerBatch.decode(decoded.encode())
expect(decoded).to.deep.eq(batch)
})
it(`${hash} (compressed)`, () => {
// Set the batch type to be zlib so that the batch
// is compressed
decoded.type = BatchType.ZLIB
// Encode a compressed batch
const encodedCompressed = decoded.encode()
// Decode a compressed batch
const decodedPostCompressed =
SequencerBatch.decode<SequencerBatch>(encodedCompressed)
// Expect that the batch type is detected
expect(decodedPostCompressed.type).to.eq(BatchType.ZLIB)
// Expect that the contexts match
expect(decoded.contexts).to.deep.equal(decodedPostCompressed.contexts)
// Compare transactions one-by-one so a mismatch pinpoints the index.
for (const [i, tx] of decoded.transactions.entries()) {
const got = decodedPostCompressed.transactions[i]
expect(got).to.deep.eq(tx)
}
// Reserialize the batch as legacy
decodedPostCompressed.type = BatchType.LEGACY
// Ensure that the original data can be recovered
const encoded = decodedPostCompressed.toHex()
expect(encoded).to.deep.equal(calldata)
})
it(`${hash}: serialize txs`, () => {
// Smoke test: every batched tx must deserialize without throwing.
for (const tx of decoded.transactions) {
tx.toTransaction()
}
})
}
})
......
This diff is collapsed.
{
"extends": "../../tsconfig.json"
"extends": "../../tsconfig.json",
"typeRoots": ["node_modules/@types", "src/@types"]
}
/* Imports: External */
import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers'
import { BatchType } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { SimpleDB } from './simple-db'
......@@ -127,7 +128,14 @@ export class TransportDB {
public async getTransactionBatchByIndex(
index: number
): Promise<TransactionBatchEntry> {
return this._getEntryByIndex(TRANSPORT_DB_KEYS.TRANSACTION_BATCH, index)
const entry = (await this._getEntryByIndex(
TRANSPORT_DB_KEYS.TRANSACTION_BATCH,
index
)) as TransactionBatchEntry
if (entry && typeof entry.type === 'undefined') {
entry.type = BatchType[BatchType.LEGACY]
}
return entry
}
public async getStateRootByIndex(index: number): Promise<StateRootEntry> {
......@@ -169,7 +177,13 @@ export class TransportDB {
}
public async getLatestTransactionBatch(): Promise<TransactionBatchEntry> {
return this._getLatestEntry(TRANSPORT_DB_KEYS.TRANSACTION_BATCH)
const entry = (await this._getLatestEntry(
TRANSPORT_DB_KEYS.TRANSACTION_BATCH
)) as TransactionBatchEntry
if (entry && typeof entry.type === 'undefined') {
entry.type = BatchType[BatchType.LEGACY]
}
return entry
}
public async getLatestStateRoot(): Promise<StateRootEntry> {
......
/* Imports: External */
import { BigNumber, ethers, constants } from 'ethers'
import { serialize, Transaction } from '@ethersproject/transactions'
import { getContractFactory } from '@eth-optimism/contracts'
import {
fromHexString,
toHexString,
toRpcHexString,
BatchType,
SequencerBatch,
} from '@eth-optimism/core-utils'
import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain'
......@@ -76,33 +78,33 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
parseEvent: (event, extraData, l2ChainId) => {
const transactionEntries: TransactionEntry[] = []
// It's easier to deal with this data if it's a Buffer.
const calldata = fromHexString(extraData.l1TransactionData)
if (calldata.length < 12) {
// 12 * 2 + 2 = 26
if (extraData.l1TransactionData.length < 26) {
throw new Error(
`Block ${extraData.blockNumber} transaction data is invalid for decoding: ${extraData.l1TransactionData} , ` +
`converted buffer length is < 12.`
`Block ${extraData.blockNumber} transaction data is too small: ${extraData.l1TransactionData.length}`
)
}
const numContexts = BigNumber.from(calldata.slice(12, 15)).toNumber()
// TODO: typings not working?
const decoded = (SequencerBatch as any).fromHex(extraData.l1TransactionData)
// Keep track of the CTC index
let transactionIndex = 0
// Keep track of the number of deposits
let enqueuedCount = 0
let nextTxPointer = 15 + 16 * numContexts
for (let i = 0; i < numContexts; i++) {
const contextPointer = 15 + 16 * i
const context = parseSequencerBatchContext(calldata, contextPointer)
// Keep track of the tx index in the current batch
let index = 0
for (const context of decoded.contexts) {
for (let j = 0; j < context.numSequencedTransactions; j++) {
const sequencerTransaction = parseSequencerBatchTransaction(
calldata,
nextTxPointer
const buf = decoded.transactions[index]
if (!buf) {
throw new Error(
`Invalid batch context, tx count: ${decoded.transactions.length}, attempting to parse ${index}`
)
}
const decoded = decodeSequencerBatchTransaction(
sequencerTransaction,
l2ChainId
)
const tx = buf.toTransaction()
transactionEntries.push({
index: extraData.prevTotalElements
......@@ -114,16 +116,29 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: null,
data: toHexString(sequencerTransaction),
data: serialize(
{
nonce: tx.nonce,
gasPrice: tx.gasPrice,
gasLimit: tx.gasLimit,
to: tx.to,
value: tx.value,
data: tx.data,
},
{
v: tx.v,
r: tx.r,
s: tx.s,
}
),
queueOrigin: 'sequencer',
value: decoded.value,
value: toRpcHexString(tx.value),
queueIndex: null,
decoded,
decoded: mapSequencerTransaction(tx, l2ChainId),
confirmed: true,
})
nextTxPointer += 3 + sequencerTransaction.length
transactionIndex++
index++
}
for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
......@@ -169,6 +184,7 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
l1TransactionHash: extraData.l1TransactionHash,
type: BatchType[decoded.type],
}
return {
......@@ -206,61 +222,21 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
},
}
// One decoded "context" entry from appendSequencerBatch calldata: the counts
// of sequencer and subsequent queue transactions it covers, plus the
// timestamp and block number stamped on them (see parseSequencerBatchContext
// for the on-wire byte layout).
interface SequencerBatchContext {
numSequencedTransactions: number
numSubsequentQueueTransactions: number
timestamp: number
blockNumber: number
}
/**
 * Decodes one 16-byte batch context from `calldata` starting at `offset`.
 * Field layout (big-endian): 3 bytes sequenced-tx count, 3 bytes queue-tx
 * count, 5 bytes timestamp, 5 bytes block number.
 */
const parseSequencerBatchContext = (
  calldata: Buffer,
  offset: number
): SequencerBatchContext => {
  // Read `size` bytes at `offset + pos` as an unsigned big-endian number.
  const field = (pos: number, size: number): number =>
    BigNumber.from(calldata.slice(offset + pos, offset + pos + size)).toNumber()

  return {
    numSequencedTransactions: field(0, 3),
    numSubsequentQueueTransactions: field(3, 3),
    timestamp: field(6, 5),
    blockNumber: field(11, 5),
  }
}
/**
 * Extracts one transaction's raw bytes from `calldata` at `offset`.
 * The first 3 bytes are a big-endian length prefix; the payload of that
 * many bytes immediately follows it.
 */
const parseSequencerBatchTransaction = (
  calldata: Buffer,
  offset: number
): Buffer => {
  const payloadStart = offset + 3
  const payloadLength = BigNumber.from(
    calldata.slice(offset, payloadStart)
  ).toNumber()
  return calldata.slice(payloadStart, payloadStart + payloadLength)
}
const decodeSequencerBatchTransaction = (
transaction: Buffer,
const mapSequencerTransaction = (
tx: Transaction,
l2ChainId: number
): DecodedSequencerBatchTransaction => {
const decodedTx = ethers.utils.parseTransaction(transaction)
return {
nonce: BigNumber.from(decodedTx.nonce).toString(),
gasPrice: BigNumber.from(decodedTx.gasPrice).toString(),
gasLimit: BigNumber.from(decodedTx.gasLimit).toString(),
value: toRpcHexString(decodedTx.value),
target: decodedTx.to ? toHexString(decodedTx.to) : null,
data: toHexString(decodedTx.data),
nonce: BigNumber.from(tx.nonce).toString(),
gasPrice: BigNumber.from(tx.gasPrice).toString(),
gasLimit: BigNumber.from(tx.gasLimit).toString(),
value: toRpcHexString(tx.value),
target: tx.to ? toHexString(tx.to) : null,
data: toHexString(tx.data),
sig: {
v: parseSignatureVParam(decodedTx.v, l2ChainId),
r: toHexString(decodedTx.r),
s: toHexString(decodedTx.s),
v: parseSignatureVParam(tx.v, l2ChainId),
r: toHexString(tx.r),
s: toHexString(tx.s),
},
}
}
......@@ -60,6 +60,7 @@ export const handleEventsStateBatchAppended: EventHandlerSet<
prevTotalElements: event.args._prevTotalElements.toNumber(),
extraData: event.args._extraData,
l1TransactionHash: extraData.l1TransactionHash,
type: 'LEGACY', // There is currently only 1 state root batch type
}
return {
......
......@@ -48,6 +48,7 @@ interface BatchEntry {
prevTotalElements: number
extraData: string
l1TransactionHash: string
type: string
}
export type TransactionBatchEntry = BatchEntry
......
/* External Imports */
import fs from 'fs'
import path from 'path'
import chai = require('chai')
import Mocha from 'mocha'
import chaiAsPromised from 'chai-as-promised'
import { BigNumber } from 'ethers'
// Chai plugins go here.
chai.use(chaiAsPromised)
......@@ -9,4 +12,38 @@ chai.use(chaiAsPromised)
const should = chai.should()
const expect = chai.expect
export { should, expect, Mocha }
/**
 * Loads the `.txt` fixture files from unit-tests/examples, parsing each as
 * JSON and rehydrating the BigNumber-valued fields that JSON cannot
 * represent natively. Returns the list of fixture objects.
 */
const readMockData = () => {
  const mockDataPath = path.join(__dirname, 'unit-tests', 'examples')
  const fixtures = []
  for (const filename of fs.readdirSync(mockDataPath)) {
    // Skip non .txt files
    if (!filename.endsWith('.txt')) {
      continue
    }
    const contents = fs.readFileSync(path.join(mockDataPath, filename))
    const obj = JSON.parse(contents.toString())
    // Reserialize the BigNumbers
    const extra = obj.input.extraData
    extra.prevTotalElements = BigNumber.from(extra.prevTotalElements)
    extra.batchIndex = BigNumber.from(extra.batchIndex)
    extra.batchSize = BigNumber.from(extra.batchSize)
    // The event is expected to carry exactly three positional args.
    if (obj.input.event.args.length !== 3) {
      throw new Error(`ABI mismatch`)
    }
    const args = obj.input.event.args.map(BigNumber.from)
    // Mirror the positional args under their named ABI aliases.
    args._startingQueueIndex = args[0]
    args._numQueueElements = args[1]
    args._totalElements = args[2]
    obj.input.event.args = args
    fixtures.push(obj)
  }
  return fixtures
}
export { should, expect, Mocha, readMockData }
import { BigNumber, ethers } from 'ethers'
import { sequencerBatch, add0x, BatchType } from '@eth-optimism/core-utils'
// Re-encodes legacy appendSequencerBatch calldata as a zlib-compressed batch,
// returning the 0x-prefixed hex string.
const compressBatchWithZlib = (calldata: string): string => {
  const decoded = sequencerBatch.decode(calldata)
  // Flipping the type makes encode() emit the compressed wire format.
  decoded.type = BatchType.ZLIB
  return add0x(sequencerBatch.encode(decoded))
}
/* Imports: Internal */
import { expect } from '../../../../setup'
import { expect, readMockData } from '../../../../setup'
import { handleEventsSequencerBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended'
import { SequencerBatchAppendedExtraData } from '../../../../../src/types'
describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () => {
const mockData = readMockData()
describe('handleEventsSequencerBatchAppended.parseEvent', () => {
// This tests the behavior of parsing a real mainnet transaction,
// so it will break if the encoding scheme changes.
......@@ -46,9 +56,53 @@ describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', ()
expect(() => {
handleEventsSequencerBatchAppended.parseEvent(...input1)
}).to.throw(
`Block ${input1[1].blockNumber} transaction data is invalid for decoding: ${input1[1].l1TransactionData} , ` +
`converted buffer length is < 12.`
`Block ${input1[1].blockNumber} transaction data is too small: ${input1[1].l1TransactionData.length}`
)
})
// Feeds recorded mainnet SequencerBatchAppended events through parseEvent in
// both legacy (uncompressed) and zlib-compressed form, checking the parsed
// result against the recorded expected output.
describe('mainnet transactions', () => {
for (const mock of mockData) {
const { input, output } = mock
const { event, extraData, l2ChainId } = input
const hash = mock.input.extraData.l1TransactionHash
it(`uncompressed: ${hash}`, () => {
// Set the type to be legacy
output.transactionBatchEntry.type = BatchType[BatchType.LEGACY]
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
extraData,
l2ChainId
)
// Check all of the transaction entries individually
// (per-entry asserts pinpoint the first mismatching index).
for (const [i, got] of res.transactionEntries.entries()) {
const expected = output.transactionEntries[i]
expect(got).to.deep.eq(expected, `case ${i}`)
}
expect(res).to.deep.eq(output)
})
it(`compressed: ${hash}`, () => {
// Set the type to be zlib
output.transactionBatchEntry.type = BatchType[BatchType.ZLIB]
const compressed = compressBatchWithZlib(
input.extraData.l1TransactionData
)
// Shallow-copy extraData so the original fixture is not mutated.
const copy = { ...extraData }
copy.l1TransactionData = compressed
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
copy,
l2ChainId
)
expect(res).to.deep.eq(output)
})
}
})
})
})
{
"extends": "../../tsconfig.json"
"extends": "../../tsconfig.json",
"typeRoots": ["node_modules/@types", "src/@types"]
}
......@@ -1080,20 +1080,13 @@
dependencies:
"@ethersproject/logger" "^5.4.0"
"@ethersproject/networks@5.5.2":
"@ethersproject/networks@5.5.2", "@ethersproject/networks@^5.5.0":
version "5.5.2"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.2.tgz#784c8b1283cd2a931114ab428dae1bd00c07630b"
integrity sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ==
dependencies:
"@ethersproject/logger" "^5.5.0"
"@ethersproject/networks@^5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.0.tgz#babec47cab892c51f8dd652ce7f2e3e14283981a"
integrity sha512-KWfP3xOnJeF89Uf/FCJdV1a2aDJe5XTN2N52p4fcQ34QhDqQFkgQKZ39VGtiqUgHcLI8DfT0l9azC3KFTunqtA==
dependencies:
"@ethersproject/logger" "^5.5.0"
"@ethersproject/pbkdf2@5.4.0", "@ethersproject/pbkdf2@^5.0.0", "@ethersproject/pbkdf2@^5.4.0":
version "5.4.0"
resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.4.0.tgz#ed88782a67fda1594c22d60d0ca911a9d669641c"
......@@ -1423,7 +1416,7 @@
"@ethersproject/properties" "^5.4.0"
"@ethersproject/strings" "^5.4.0"
"@ethersproject/web@5.5.1", "@ethersproject/web@^5.5.1":
"@ethersproject/web@5.5.1", "@ethersproject/web@^5.5.0", "@ethersproject/web@^5.5.1":
version "5.5.1"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.1.tgz#cfcc4a074a6936c657878ac58917a61341681316"
integrity sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg==
......@@ -1434,17 +1427,6 @@
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/web@^5.5.0":
version "5.5.0"
resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.0.tgz#0e5bb21a2b58fb4960a705bfc6522a6acf461e28"
integrity sha512-BEgY0eL5oH4mAo37TNYVrFeHsIXLRxggCRG/ksRIxI2X5uj5IsjGmcNiRN/VirQOlBxcUhCgHhaDLG4m6XAVoA==
dependencies:
"@ethersproject/base64" "^5.5.0"
"@ethersproject/bytes" "^5.5.0"
"@ethersproject/logger" "^5.5.0"
"@ethersproject/properties" "^5.5.0"
"@ethersproject/strings" "^5.5.0"
"@ethersproject/wordlists@5.4.0", "@ethersproject/wordlists@^5.0.0", "@ethersproject/wordlists@^5.4.0":
version "5.4.0"
resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.4.0.tgz#f34205ec3bbc9e2c49cadaee774cf0b07e7573d7"
......@@ -4850,6 +4832,11 @@ bufferutil@^4.0.1:
dependencies:
node-gyp-build "^4.2.0"
bufio@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/bufio/-/bufio-1.0.7.tgz#b7f63a1369a0829ed64cc14edf0573b3e382a33e"
integrity sha512-bd1dDQhiC+bEbEfg56IdBv7faWa6OipMs/AFFFvtFnB3wAYjlwQpQRZ0pm6ZkgtfL0pILRXhKxOiQj6UzoMR7A==
builtin-modules@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887"
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment