package derive

import (
	"context"
	"encoding/binary"
	"io"
	"math/rand"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/ethereum-optimism/optimism/op-node/eth"
	"github.com/ethereum-optimism/optimism/op-node/rollup"
	"github.com/ethereum-optimism/optimism/op-node/testlog"
	"github.com/ethereum-optimism/optimism/op-node/testutils"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/common/hexutil"
	"github.com/ethereum/go-ethereum/log"
)

// fakeBatchQueueOutput fakes the next stage (receive only) for the batch queue.
// It tracks the progress state of the next stage.
// Upon receiving a batch, the relevant characteristics of safeL2Head are immediately advanced.
type fakeBatchQueueOutput struct {
	progress   Progress
	batches    []*BatchData
	safeL2Head eth.L2BlockRef
}

var _ BatchQueueOutput = (*fakeBatchQueueOutput)(nil)

func (f *fakeBatchQueueOutput) AddBatch(batch *BatchData) {
	f.batches = append(f.batches, batch)
	if batch.ParentHash != f.safeL2Head.Hash {
		panic("batch has wrong parent hash")
	}
	newEpoch := f.safeL2Head.L1Origin.Hash != batch.EpochHash
	// Advance SafeL2Head
	f.safeL2Head.Time = batch.Timestamp
	f.safeL2Head.L1Origin.Number = uint64(batch.EpochNum)
	f.safeL2Head.L1Origin.Hash = batch.EpochHash
	if newEpoch {
		f.safeL2Head.SequenceNumber = 0
	} else {
		f.safeL2Head.SequenceNumber += 1
	}
	f.safeL2Head.ParentHash = batch.ParentHash
	f.safeL2Head.Hash = mockHash(batch.Timestamp, 2)
}

func (f *fakeBatchQueueOutput) SafeL2Head() eth.L2BlockRef {
	return f.safeL2Head
}

func (f *fakeBatchQueueOutput) Progress() Progress {
	return f.progress
}

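// mockHash creates a deterministic hash from a timestamp, with the last byte
// marking which layer the block belongs to (1 = L1, 2 = L2).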
func mockHash(time uint64, layer uint8) common.Hash {
	hash := common.Hash{31: layer} // indicate L1 or L2
	binary.LittleEndian.PutUint64(hash[:], time)
	return hash
}

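// b builds a test batch at the given timestamp for the given L1 origin epoch,
// parented to the L2 block one block time (2s) earlier and carrying a single
// random transaction.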
func b(timestamp uint64, epoch eth.L1BlockRef) *BatchData {
	rng := rand.New(rand.NewSource(int64(timestamp)))
	data := testutils.RandomData(rng, 20)
	return &BatchData{BatchV1{
		ParentHash:   mockHash(timestamp-2, 2),
		Timestamp:    timestamp,
		EpochNum:     rollup.Epoch(epoch.Number),
		EpochHash:    epoch.Hash,
		Transactions: []hexutil.Bytes{data},
	}}
}

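// L1Chain constructs a chain of L1 block refs from a list of block timestamps,
// numbering the blocks sequentially from 0 and linking them by parent hash.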
func L1Chain(l1Times []uint64) []eth.L1BlockRef {
	var out []eth.L1BlockRef
	var parentHash common.Hash
	for i, time := range l1Times {
		hash := mockHash(time, 1)
		out = append(out, eth.L1BlockRef{
			Hash:       hash,
			Number:     uint64(i),
			ParentHash: parentHash,
			Time:       time,
		})
		parentHash = hash
	}
	return out
}

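// TestBatchQueueEager adds two consecutive, valid batches for the first epoch
// and expects the batch queue to emit them immediately.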
func TestBatchQueueEager(t *testing.T) {
	log := testlog.Logger(t, log.LvlTrace)
	l1 := L1Chain([]uint64{10, 20, 30})
	next := &fakeBatchQueueOutput{
		safeL2Head: eth.L2BlockRef{
			Hash:           mockHash(10, 2),
			Number:         0,
			ParentHash:     common.Hash{},
			Time:           10,
			L1Origin:       l1[0].ID(),
			SequenceNumber: 0,
		},
		progress: Progress{
			Origin: l1[0],
			Closed: false,
		},
	}
	cfg := &rollup.Config{
		Genesis: rollup.Genesis{
			L2Time: 10,
		},
		BlockTime:         2,
		MaxSequencerDrift: 600,
		SeqWindowSize:     30,
	}

	bq := NewBatchQueue(log, cfg, next)
	require.Equal(t, io.EOF, bq.ResetStep(context.Background(), nil), "reset should complete without l1 fetcher, single step")

	// We start with an open L1 origin as progress in the first step
	progress := bq.progress
	require.Equal(t, bq.progress.Closed, false)

	// Add batches
	batches := []*BatchData{b(12, l1[0]), b(14, l1[0])}
	for _, batch := range batches {
		bq.AddBatch(batch)
	}
	// Step
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))

	// Verify Output
	require.Equal(t, batches, next.batches)
}

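// TestBatchQueueFull adds the later batches of the epoch first, advances the L1
// origin to the end of the sequence window, then adds the missing first batch and
// expects all batches to be emitted in order.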
func TestBatchQueueFull(t *testing.T) {
	log := testlog.Logger(t, log.LvlTrace)
	l1 := L1Chain([]uint64{10, 15, 20})
	next := &fakeBatchQueueOutput{
		safeL2Head: eth.L2BlockRef{
			Hash:           mockHash(10, 2),
			Number:         0,
			ParentHash:     common.Hash{},
			Time:           10,
			L1Origin:       l1[0].ID(),
			SequenceNumber: 0,
		},
		progress: Progress{
			Origin: l1[0],
			Closed: false,
		},
	}
	cfg := &rollup.Config{
		Genesis: rollup.Genesis{
			L2Time: 10,
		},
		BlockTime:         2,
		MaxSequencerDrift: 600,
		SeqWindowSize:     2,
	}

	bq := NewBatchQueue(log, cfg, next)
	require.Equal(t, io.EOF, bq.ResetStep(context.Background(), nil), "reset should complete without l1 fetcher, single step")

	// We start with an open L1 origin as progress in the first step
	progress := bq.progress
	require.Equal(t, bq.progress.Closed, false)

	// Add batches
	batches := []*BatchData{b(14, l1[0]), b(16, l1[0]), b(18, l1[1])}
	for _, batch := range batches {
		bq.AddBatch(batch)
	}
	// Missing first batch
	err := bq.Step(context.Background(), progress)
	require.Equal(t, err, io.EOF)

	// Close previous to close bq
	progress.Closed = true
	err = bq.Step(context.Background(), progress)
	require.Equal(t, err, nil)
	require.Equal(t, bq.progress.Closed, true)

	// Open previous to open bq with the new inclusion block
	progress.Closed = false
	progress.Origin = l1[1]
	err = bq.Step(context.Background(), progress)
	require.Equal(t, err, nil)
	require.Equal(t, bq.progress.Closed, false)

	// Close previous to close bq (for epoch 2)
	progress.Closed = true
	err = bq.Step(context.Background(), progress)
	require.Equal(t, err, nil)
	require.Equal(t, bq.progress.Closed, true)

	// Open previous to open bq with the new inclusion block (epoch 2)
	progress.Closed = false
	progress.Origin = l1[2]
	err = bq.Step(context.Background(), progress)
	require.Equal(t, err, nil)
	require.Equal(t, bq.progress.Closed, false)

	// Finally add batch
	firstBatch := b(12, l1[0])
	bq.AddBatch(firstBatch)

	// Close the origin
	progress.Closed = true
	err = bq.Step(context.Background(), progress)
	require.Equal(t, err, nil)
	require.Equal(t, bq.progress.Closed, true)

	// Step, but should have full epoch now
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))

	// Verify Output
	var final []*BatchData
	final = append(final, firstBatch)
	final = append(final, batches...)
	require.Equal(t, final, next.batches)
}

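// TestBatchQueueMissing adds batches with gaps at timestamps 12 and 14 and checks
// that empty batches are generated for the gaps once the epoch's sequence window expires.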
func TestBatchQueueMissing(t *testing.T) {
	log := testlog.Logger(t, log.LvlTrace)
	l1 := L1Chain([]uint64{10, 15, 20})
	next := &fakeBatchQueueOutput{
		safeL2Head: eth.L2BlockRef{
			Hash:           mockHash(10, 2),
			Number:         0,
			ParentHash:     common.Hash{},
			Time:           10,
			L1Origin:       l1[0].ID(),
			SequenceNumber: 0,
		},
		progress: Progress{
			Origin: l1[0],
			Closed: false,
		},
	}
	cfg := &rollup.Config{
		Genesis: rollup.Genesis{
			L2Time: 10,
		},
		BlockTime:         2,
		MaxSequencerDrift: 600,
		SeqWindowSize:     2,
	}

	bq := NewBatchQueue(log, cfg, next)
	require.Equal(t, io.EOF, bq.ResetStep(context.Background(), nil), "reset should complete without l1 fetcher, single step")

	// We start with an open L1 origin as progress in the first step
	progress := bq.progress
	require.Equal(t, bq.progress.Closed, false)

	// The batches at 18 and 20 are skipped to stop 22 from being eagerly processed.
	// This test checks that empty batches with timestamps 12 & 14 are created, batch 16 is used,
	// and batch 18 advances the epoch.
	// Due to the large sequencer time drift, 16 is perfectly valid with epoch 0 as its origin.
	batches := []*BatchData{b(16, l1[0]), b(22, l1[1])}
	for _, batch := range batches {
		bq.AddBatch(batch)
	}
	// Missing first batches with timestamp 12 and 14, nothing to do yet.
	err := bq.Step(context.Background(), progress)
	require.Equal(t, err, io.EOF)

	// Close l1[0]
	progress.Closed = true
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))
	require.Equal(t, bq.progress.Closed, true)

	// Open l1[1]
	progress.Closed = false
	progress.Origin = l1[1]
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))
	require.Equal(t, bq.progress.Closed, false)
	require.Empty(t, next.batches, "no batches yet, sequence window did not expire, waiting for 12 and 14")

	// Close l1[1]
	progress.Closed = true
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))
	require.Equal(t, bq.progress.Closed, true)

	// Open l1[2]
	progress.Closed = false
	progress.Origin = l1[2]
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))
	require.Equal(t, bq.progress.Closed, false)

	// Close l1[2], this is the moment that l1[0] expires and empty batches 12 and 14 can be created,
	// and batch 16 can then be used.
	progress.Closed = true
	require.NoError(t, RepeatStep(t, bq.Step, progress, 10))
	require.Equal(t, bq.progress.Closed, true)
	require.Equal(t, 4, len(next.batches), "expecting empty batches with timestamp 12 and 14 to be created and existing batch 16 to follow")
	require.Equal(t, uint64(12), next.batches[0].Timestamp)
	require.Equal(t, uint64(14), next.batches[1].Timestamp)
	require.Equal(t, batches[0], next.batches[2])
	require.Equal(t, uint64(18), next.batches[3].Timestamp)
	require.Equal(t, rollup.Epoch(1), next.batches[3].EpochNum)
}