Commit 5c5fcaa0 authored by Conner Fromknecht's avatar Conner Fromknecht

feat: add batch tx encoding and test vectors

parent ece7de14
package sequencer
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"math"
l2types "github.com/ethereum-optimism/optimism/l2geth/core/types"
l2rlp "github.com/ethereum-optimism/optimism/l2geth/rlp"
)
var byteOrder = binary.BigEndian
// BatchContext denotes a range of transactions that belong to the same batch. It
// is used to compress shared fields that would otherwise be repeated for each
// transaction.
type BatchContext struct {
// NumSequencedTxs specifies the number of sequencer txs included in
// the batch.
NumSequencedTxs uint64 `json:"num_sequenced_txs"`
// NumSubsequentQueueTxs specifies the number of queued txs included in
// the batch.
NumSubsequentQueueTxs uint64 `json:"num_subsequent_queue_txs"`
// Timestamp is the L1 timestamp of the batch.
Timestamp uint64 `json:"timestamp"`
// BlockNumber is the L1 BlockNumber of the batch.
BlockNumber uint64 `json:"block_number"`
}
// Write encodes the BatchContext into a 16-byte stream using the following
// encoding:
// - num_sequenced_txs: 3 bytes
// - num_subsequent_queue_txs: 3 bytes
// - timestamp: 5 bytes
// - block_number: 5 bytes
func (c *BatchContext) Write(w *bytes.Buffer) {
writeUint64(w, c.NumSequencedTxs, 3)
writeUint64(w, c.NumSubsequentQueueTxs, 3)
writeUint64(w, c.Timestamp, 5)
writeUint64(w, c.BlockNumber, 5)
}
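// As a worked example (a sketch mirroring the spec test vector exercised in
// the tests), the context
//
//	BatchContext{
//		NumSequencedTxs:       0x000102,
//		NumSubsequentQueueTxs: 0x030405,
//		Timestamp:             0x060708090a,
//		BlockNumber:           0x0b0c0d0e0f,
//	}
//
// is written as the 16 bytes 000102030405060708090a0b0c0d0e0f, and Read
// (below) recovers an identical BatchContext from that encoding.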
// Read decodes the BatchContext from the passed reader. If fewer than 16 bytes
// remain, an error is returned. Otherwise the first 16 bytes are read using
// the expected encoding:
// - num_sequenced_txs: 3 bytes
// - num_subsequent_queue_txs: 3 bytes
// - timestamp: 5 bytes
// - block_number: 5 bytes
func (c *BatchContext) Read(r io.Reader) error {
if err := readUint64(r, &c.NumSequencedTxs, 3); err != nil {
return err
}
if err := readUint64(r, &c.NumSubsequentQueueTxs, 3); err != nil {
return err
}
if err := readUint64(r, &c.Timestamp, 5); err != nil {
return err
}
return readUint64(r, &c.BlockNumber, 5)
}
// AppendSequencerBatchParams holds the raw data required to submit a batch of
// L2 txs to the L1 CTC contract. Rather than encoding the objects using the
// standard ABI encoding, a custom encoding is used and provided in the calldata to
// optimize for gas fees, since batch submission of L2 txs is a primary cost
// driver.
type AppendSequencerBatchParams struct {
// ShouldStartAtElement specifies the intended starting sequence number
// of the provided transaction. Upon submission, this should match the
// CTC's expected value otherwise the transaction will revert.
ShouldStartAtElement uint64
// TotalElementsToAppend indicates the number of L2 txs represented by
// this batch. This includes both sequencer and queued txs.
TotalElementsToAppend uint64
// Contexts aggregates redundant L1 block numbers and L1 timestamps for
// the txs encoded in the Txs slice. Further, they specify consecutive
// tx windows in Txs and implicitly allow one to compute how many
// (omitted) queued txs are in a given window.
Contexts []BatchContext
// Txs contains all sequencer txs that will be recorded in the L1 CTC
// contract.
Txs []*l2types.Transaction
}
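// To illustrate the windowing (an interpretive sketch, not an additional
// encoding rule): a context with NumSequencedTxs: 2 and
// NumSubsequentQueueTxs: 1 covers the next 2 consecutive entries of Txs,
// which share that context's Timestamp and BlockNumber, and is followed by
// 1 queued tx that is omitted from Txs entirely.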
// Write encodes the AppendSequencerBatchParams using the following format:
// - should_start_at_element: 5 bytes
// - total_elements_to_append: 3 bytes
// - num_contexts: 3 bytes
// - num_contexts * batch_context: num_contexts * 16 bytes
// - [num txs omitted]
// - tx_len: 3 bytes
// - tx_bytes: tx_len bytes
func (p *AppendSequencerBatchParams) Write(w *bytes.Buffer) error {
writeUint64(w, p.ShouldStartAtElement, 5)
writeUint64(w, p.TotalElementsToAppend, 3)
// Write number of contexts followed by each fixed-size BatchContext.
writeUint64(w, uint64(len(p.Contexts)), 3)
for _, context := range p.Contexts {
context.Write(w)
}
// Write each length-prefixed tx.
var txBuf bytes.Buffer
for _, tx := range p.Txs {
txBuf.Reset()
if err := tx.EncodeRLP(&txBuf); err != nil {
return err
}
writeUint64(w, uint64(txBuf.Len()), 3)
_, _ = w.Write(txBuf.Bytes()) // can't fail for bytes.Buffer
}
return nil
}
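// As a worked serialization example (a sketch based on the "single tx" spec
// test vector below), a batch with ShouldStartAtElement: 1,
// TotalElementsToAppend: 1, no Contexts, and one 10-byte RLP tx
// c9808080808080808080 is written as:
//
//	0000000001            // should_start_at_element (5 bytes)
//	000001                // total_elements_to_append (3 bytes)
//	000000                // num_contexts (3 bytes)
//	00000a                // tx_len (3 bytes)
//	c9808080808080808080  // tx_bytes (10 bytes)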
// Serialize performs the same encoding as Write, but returns the resulting
// byte slice.
func (p *AppendSequencerBatchParams) Serialize() ([]byte, error) {
var buf bytes.Buffer
if err := p.Write(&buf); err != nil {
return nil, err
}
return buf.Bytes(), nil
}
// Read decodes the AppendSequencerBatchParams from a bytes stream. If the byte
// stream does not terminate cleanly with an EOF while reading a tx_len, this
// method will return an error. Otherwise, the stream will be parsed according
// to the following format:
// - should_start_at_element: 5 bytes
// - total_elements_to_append: 3 bytes
// - num_contexts: 3 bytes
// - num_contexts * batch_context: num_contexts * 16 bytes
// - [num txs omitted]
// - tx_len: 3 bytes
// - tx_bytes: tx_len bytes
func (p *AppendSequencerBatchParams) Read(r io.Reader) error {
if err := readUint64(r, &p.ShouldStartAtElement, 5); err != nil {
return err
}
if err := readUint64(r, &p.TotalElementsToAppend, 3); err != nil {
return err
}
// Read number of contexts and deserialize each one.
var numContexts uint64
if err := readUint64(r, &numContexts, 3); err != nil {
return err
}
for i := uint64(0); i < numContexts; i++ {
var batchContext BatchContext
if err := batchContext.Read(r); err != nil {
return err
}
p.Contexts = append(p.Contexts, batchContext)
}
// Deserialize any transactions. Since the number of txs is omitted
// from the encoding, loop until the stream is consumed.
for {
var txLen uint64
err := readUint64(r, &txLen, 3)
// Getting an EOF when reading the txLen is expected for a cleanly
// encoded object. Silence the error and return success.
if err == io.EOF {
return nil
} else if err != nil {
return err
}
tx := new(l2types.Transaction)
if err := tx.DecodeRLP(l2rlp.NewStream(r, txLen)); err != nil {
return err
}
p.Txs = append(p.Txs, tx)
}
}
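// A minimal decode sketch (assuming an encoding/hex import; the input is the
// "single tx" spec test vector below):
//
//	raw, _ := hex.DecodeString(
//		"000000000100000100000000000ac9808080808080808080")
//	var params AppendSequencerBatchParams
//	if err := params.Read(bytes.NewReader(raw)); err != nil {
//		// handle error
//	}
//	// params.ShouldStartAtElement == 1, params.TotalElementsToAppend == 1,
//	// len(params.Contexts) == 0, len(params.Txs) == 1. The loop above exits
//	// via the io.EOF check once the final tx has been consumed.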
// writeUint64 writes the bottom `n` bytes of `val` to `w` in big-endian order.
func writeUint64(w *bytes.Buffer, val uint64, n uint) {
if n < 1 || n > 8 {
panic(fmt.Sprintf("invalid number of bytes %d must be 1-8", n))
}
const maxUint64 uint64 = math.MaxUint64
maxVal := maxUint64 >> (8 * (8 - n))
if val > maxVal {
panic(fmt.Sprintf("cannot encode %d in %d byte value", val, n))
}
var buf [8]byte
byteOrder.PutUint64(buf[:], val)
_, _ = w.Write(buf[8-n:]) // can't fail for bytes.Buffer
}
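// For example, writeUint64(w, 0x0102030405, 5) appends the big-endian bytes
// 01 02 03 04 05 to w, while writeUint64(w, 256, 1) panics because 256 does
// not fit in a single byte. readUint64 below performs the inverse operation.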
// readUint64 reads `n` bytes from `r` and returns them in the lower `n` bytes
// of `val`.
func readUint64(r io.Reader, val *uint64, n uint) error {
var buf [8]byte
// Use io.ReadFull so a short read surfaces as an error instead of silently
// leaving part of buf unpopulated.
if _, err := io.ReadFull(r, buf[8-n:]); err != nil {
return err
}
*val = byteOrder.Uint64(buf[:])
return nil
}
package sequencer_test
import (
"bytes"
"encoding/hex"
"encoding/json"
"os"
"testing"
"github.com/ethereum-optimism/optimism/go/batch-submitter/drivers/sequencer"
l2types "github.com/ethereum-optimism/optimism/l2geth/core/types"
l2rlp "github.com/ethereum-optimism/optimism/l2geth/rlp"
"github.com/stretchr/testify/require"
)
// TestBatchContextEncodeDecode tests the (de)serialization of a BatchContext
// against the spec test vector. The encoding should be:
// - num_sequenced_txs: 3 bytes
// - num_subsequent_queue_txs: 3 bytes
// - timestamp: 5 bytes
// - block_number: 5 bytes
func TestBatchContextEncodeDecode(t *testing.T) {
t.Parallel()
// Test vector is chosen such that each byte maps one to one with a
// specific byte of the parsed BatchContext and such that improper
// choice of endian-ness for any field will fail.
hexEncoding := "000102030405060708090a0b0c0d0e0f"
expBatch := sequencer.BatchContext{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
}
rawBytes, err := hex.DecodeString(hexEncoding)
require.Nil(t, err)
// Test Read produces expected batch.
var batch sequencer.BatchContext
err = batch.Read(bytes.NewReader(rawBytes))
require.Nil(t, err)
require.Equal(t, expBatch, batch)
// Test Write produces original test vector.
var buf bytes.Buffer
batch.Write(&buf)
require.Equal(t, hexEncoding, hex.EncodeToString(buf.Bytes()))
}
// AppendSequencerBatchParamsTestCases is an enclosing struct that holds the
// individual AppendSequencerBatchParamsTests. This is the root-level object
// that will be parsed from the JSON spec test vectors.
type AppendSequencerBatchParamsTestCases struct {
Tests []AppendSequencerBatchParamsTest `json:"tests"`
}
// AppendSequencerBatchParamsTest specifies a single instance of a valid
// encode/decode test case for an AppendSequencerBatchParams.
type AppendSequencerBatchParamsTest struct {
Name string `json:"name"`
HexEncoding string `json:"hex_encoding"`
ShouldStartAtElement uint64 `json:"should_start_at_element"`
TotalElementsToAppend uint64 `json:"total_elements_to_append"`
Contexts []sequencer.BatchContext `json:"contexts"`
Txs []string `json:"txs"`
}
var appendSequencerBatchParamTests = AppendSequencerBatchParamsTestCases{
Tests: []AppendSequencerBatchParamsTest{
{
Name: "empty batch",
HexEncoding: "0000000000000000" +
"000000",
ShouldStartAtElement: 0,
TotalElementsToAppend: 0,
Contexts: nil,
Txs: nil,
},
{
Name: "single tx",
HexEncoding: "0000000001000001" +
"000000" +
"00000ac9808080808080808080",
ShouldStartAtElement: 1,
TotalElementsToAppend: 1,
Contexts: nil,
Txs: []string{
"c9808080808080808080",
},
},
{
Name: "multiple txs",
HexEncoding: "0000000001000004" +
"000000" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080",
ShouldStartAtElement: 1,
TotalElementsToAppend: 4,
Contexts: nil,
Txs: []string{
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
},
},
{
Name: "single context",
HexEncoding: "0000000001000000" +
"000001" +
"000102030405060708090a0b0c0d0e0f",
ShouldStartAtElement: 1,
TotalElementsToAppend: 0,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: nil,
},
{
Name: "multiple contexts",
HexEncoding: "0000000001000000" +
"000004" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f",
ShouldStartAtElement: 1,
TotalElementsToAppend: 0,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: nil,
},
{
Name: "complex",
HexEncoding: "0102030405060708" +
"000004" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"000102030405060708090a0b0c0d0e0f" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080" +
"00000ac9808080808080808080",
ShouldStartAtElement: 0x0102030405,
TotalElementsToAppend: 0x060708,
Contexts: []sequencer.BatchContext{
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
{
NumSequencedTxs: 0x000102,
NumSubsequentQueueTxs: 0x030405,
Timestamp: 0x060708090a,
BlockNumber: 0x0b0c0d0e0f,
},
},
Txs: []string{
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
},
},
},
}
// TestAppendSequencerBatchParamsEncodeDecodeMatchesJSON ensures that the
// in-memory test vectors for valid encode/decode stay in sync with the JSON
// version.
func TestAppendSequencerBatchParamsEncodeDecodeMatchesJSON(t *testing.T) {
t.Parallel()
jsonBytes, err := json.MarshalIndent(appendSequencerBatchParamTests, "", "\t")
require.Nil(t, err)
data, err := os.ReadFile("./testdata/valid_append_sequencer_batch_params.json")
require.Nil(t, err)
require.Equal(t, jsonBytes, data)
}
// TestAppendSequencerBatchParamsEncodeDecode asserts the proper encoding and
// decoding of valid serializations for AppendSequencerBatchParams.
func TestAppendSequencerBatchParamsEncodeDecode(t *testing.T) {
t.Parallel()
for _, test := range appendSequencerBatchParamTests.Tests {
t.Run(test.Name, func(t *testing.T) {
testAppendSequencerBatchParamsEncodeDecode(t, test)
})
}
}
func testAppendSequencerBatchParamsEncodeDecode(
t *testing.T, test AppendSequencerBatchParamsTest) {
// Decode the expected transactions from their hex serialization.
var expTxs []*l2types.Transaction
for _, txHex := range test.Txs {
txBytes, err := hex.DecodeString(txHex)
require.Nil(t, err)
rlpStream := l2rlp.NewStream(bytes.NewReader(txBytes), uint64(len(txBytes)))
tx := new(l2types.Transaction)
err = tx.DecodeRLP(rlpStream)
require.Nil(t, err)
expTxs = append(expTxs, tx)
}
// Construct the params we expect to decode, minus the txs. Those are
// compared separately below.
expParams := sequencer.AppendSequencerBatchParams{
ShouldStartAtElement: test.ShouldStartAtElement,
TotalElementsToAppend: test.TotalElementsToAppend,
Contexts: test.Contexts,
Txs: nil,
}
// Decode the batch from the test string.
rawBytes, err := hex.DecodeString(test.HexEncoding)
require.Nil(t, err)
var params sequencer.AppendSequencerBatchParams
err = params.Read(bytes.NewReader(rawBytes))
require.Nil(t, err)
// Assert that the decoded params match the expected params. The
// transactions are compared separately (via hash), since the internal
// `time` field of each transaction will differ. This field is only used
// for spam prevention, so it is safe to ignore with respect to
// serialization. The decoded txs are then restored on the decoded params
// in order to test the serialization.
decodedTxs := params.Txs
params.Txs = nil
require.Equal(t, expParams, params)
compareTxs(t, expTxs, decodedTxs)
params.Txs = decodedTxs
// Finally, encode the decoded object and assert it matches the original
// hex string.
paramsBytes, err := params.Serialize()
require.Nil(t, err)
require.Equal(t, test.HexEncoding, hex.EncodeToString(paramsBytes))
}
// compareTxs compares two lists of transactions, checking each pair by tx hash.
// This is used rather than require.Equal, since the `time` metadata on the
// decoded tx and the expected tx will differ, and can't be modified/ignored.
func compareTxs(t *testing.T, a, b []*l2types.Transaction) {
require.Equal(t, len(a), len(b))
for i, txA := range a {
require.Equal(t, txA.Hash(), b[i].Hash())
}
}
{
"tests": [
{
"name": "empty batch",
"hex_encoding": "0000000000000000000000",
"should_start_at_element": 0,
"total_elements_to_append": 0,
"contexts": null,
"txs": null
},
{
"name": "single tx",
"hex_encoding": "000000000100000100000000000ac9808080808080808080",
"should_start_at_element": 1,
"total_elements_to_append": 1,
"contexts": null,
"txs": [
"c9808080808080808080"
]
},
{
"name": "multiple txs",
"hex_encoding": "000000000100000400000000000ac980808080808080808000000ac980808080808080808000000ac980808080808080808000000ac9808080808080808080",
"should_start_at_element": 1,
"total_elements_to_append": 4,
"contexts": null,
"txs": [
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080"
]
},
{
"name": "single context",
"hex_encoding": "0000000001000000000001000102030405060708090a0b0c0d0e0f",
"should_start_at_element": 1,
"total_elements_to_append": 0,
"contexts": [
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
}
],
"txs": null
},
{
"name": "multiple contexts",
"hex_encoding": "0000000001000000000004000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f",
"should_start_at_element": 1,
"total_elements_to_append": 0,
"contexts": [
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
},
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
},
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
},
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
}
],
"txs": null
},
{
"name": "complex",
"hex_encoding": "0102030405060708000004000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f000102030405060708090a0b0c0d0e0f00000ac980808080808080808000000ac980808080808080808000000ac980808080808080808000000ac9808080808080808080",
"should_start_at_element": 4328719365,
"total_elements_to_append": 395016,
"contexts": [
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
},
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
},
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
},
{
"num_sequenced_txs": 258,
"num_subsequent_queue_txs": 197637,
"timestamp": 25887770890,
"block_number": 47446822415
}
],
"txs": [
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080",
"c9808080808080808080"
]
}
]
}
\ No newline at end of file
@@ -5,10 +5,13 @@ go 1.16
require (
github.com/btcsuite/btcd v0.22.0-beta // indirect
github.com/decred/dcrd/hdkeychain/v3 v3.0.0
github.com/ethereum-optimism/optimism/l2geth v1.0.0
github.com/ethereum/go-ethereum v1.10.11
github.com/getsentry/sentry-go v0.11.0
github.com/prometheus/client_golang v1.0.0
github.com/prometheus/client_golang v1.11.0
github.com/stretchr/testify v1.7.0
github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef
github.com/urfave/cli v1.22.5
)
replace github.com/ethereum-optimism/optimism/l2geth => ../../l2geth