Commit 3873b696 authored by Mark Tyneway

dtl: enable typed batch support

The data transport layer can now index typed batches. In a typed
batch, the first batch context has a timestamp of 0, and its block
number is used as an enum that determines the type of the batch.

A timestamp of 0 cannot occur under realistic conditions, so any
batch whose first batch context has a non-zero timestamp is treated
as a legacy batch.

The first typed batch is type 0, where the block number is 0 and the
transaction data is compressed with zlib.
parent 55d34935
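To make the rule concrete, here is a minimal sketch, using a hypothetical helper that is not part of this commit, of how a decoder can classify a batch from its first batch context:

import { BatchType } from '@eth-optimism/core-utils'

// Shape of a decoded batch context, limited to the two fields the
// classification rule needs.
interface BatchContext {
  timestamp: number
  blockNumber: number
}

// Hypothetical helper: classify a batch by its first context.
const getBatchType = (firstContext: BatchContext): BatchType => {
  // A non-zero timestamp can only come from a real block, so this
  // is a legacy batch.
  if (firstContext.timestamp !== 0) {
    return BatchType.LEGACY
  }
  // Otherwise the block number is repurposed as the type enum;
  // type 0 (BatchType.ZLIB) means the transaction data is
  // zlib-compressed.
  return firstContext.blockNumber as BatchType
}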
---
'@eth-optimism/data-transport-layer': patch
---
Enable typed batch support
/* Imports: External */
import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers'
import { BatchType } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { SimpleDB } from './simple-db'
@@ -126,7 +127,14 @@ export class TransportDB {
public async getTransactionBatchByIndex(
index: number
): Promise<TransactionBatchEntry> {
return this._getEntryByIndex(TRANSPORT_DB_KEYS.TRANSACTION_BATCH, index)
const entry = (await this._getEntryByIndex(
TRANSPORT_DB_KEYS.TRANSACTION_BATCH,
index
)) as TransactionBatchEntry
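// Entries indexed before typed batch support existed have no type
// field; report them as legacy batches.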
if (entry && typeof entry.type === 'undefined') {
entry.type = BatchType[BatchType.LEGACY]
}
return entry
}
public async getStateRootByIndex(index: number): Promise<StateRootEntry> {
@@ -168,7 +176,13 @@ export class TransportDB {
}
public async getLatestTransactionBatch(): Promise<TransactionBatchEntry> {
return this._getLatestEntry(TRANSPORT_DB_KEYS.TRANSACTION_BATCH)
const entry = (await this._getLatestEntry(
TRANSPORT_DB_KEYS.TRANSACTION_BATCH
)) as TransactionBatchEntry
if (entry && typeof entry.type === 'undefined') {
entry.type = BatchType[BatchType.LEGACY]
}
return entry
}
public async getLatestStateRoot(): Promise<StateRootEntry> {
......
/* Imports: External */
import { BigNumber, ethers, constants } from 'ethers'
import { serialize, Transaction } from '@ethersproject/transactions'
import { getContractFactory } from '@eth-optimism/contracts'
import {
fromHexString,
toHexString,
toRpcHexString,
BatchType,
SequencerBatch,
} from '@eth-optimism/core-utils'
import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain'
@@ -76,33 +78,33 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
parseEvent: (event, extraData, l2ChainId) => {
const transactionEntries: TransactionEntry[] = []
// It's easier to deal with this data if it's a Buffer.
const calldata = fromHexString(extraData.l1TransactionData)
if (calldata.length < 12) {
// 12 bytes = 24 hex characters, plus 2 for the '0x' prefix = 26
if (extraData.l1TransactionData.length < 26) {
throw new Error(
`Block ${extraData.blockNumber} transaction data is invalid for decoding: ${extraData.l1TransactionData} , ` +
`converted buffer length is < 12.`
`Block ${extraData.blockNumber} transaction data is too small: ${extraData.l1TransactionData.length}`
)
}
const numContexts = BigNumber.from(calldata.slice(12, 15)).toNumber()
// TODO: typings not working?
const decoded = (SequencerBatch as any).fromHex(extraData.l1TransactionData)
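// The decoded batch exposes its parsed contexts and the raw
// transactions that those contexts index into.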
// Keep track of the CTC index
let transactionIndex = 0
// Keep track of the number of deposits
let enqueuedCount = 0
let nextTxPointer = 15 + 16 * numContexts
for (let i = 0; i < numContexts; i++) {
const contextPointer = 15 + 16 * i
const context = parseSequencerBatchContext(calldata, contextPointer)
// Keep track of the tx index in the current batch
let index = 0
for (const context of decoded.contexts) {
for (let j = 0; j < context.numSequencedTransactions; j++) {
const sequencerTransaction = parseSequencerBatchTransaction(
calldata,
nextTxPointer
)
const buf = decoded.transactions[index]
if (!buf) {
throw new Error(
`Invalid batch context, tx count: ${decoded.transactions.length}, attempting to parse ${index}`
)
}
const decoded = decodeSequencerBatchTransaction(
sequencerTransaction,
l2ChainId
)
const tx = buf.toTransaction()
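// Re-serialize the signed transaction so the stored data field holds
// the canonical transaction encoding, regardless of how the batch
// itself was encoded or compressed.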
transactionEntries.push({
index: extraData.prevTotalElements
@@ -114,16 +116,29 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: null,
data: toHexString(sequencerTransaction),
data: serialize(
{
nonce: tx.nonce,
gasPrice: tx.gasPrice,
gasLimit: tx.gasLimit,
to: tx.to,
value: tx.value,
data: tx.data,
},
{
v: tx.v,
r: tx.r,
s: tx.s,
}
),
queueOrigin: 'sequencer',
value: decoded.value,
value: toRpcHexString(tx.value),
queueIndex: null,
decoded,
decoded: mapSequencerTransaction(tx, l2ChainId),
confirmed: true,
})
nextTxPointer += 3 + sequencerTransaction.length
transactionIndex++
index++
}
for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
@@ -169,6 +184,7 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
l1TransactionHash: extraData.l1TransactionHash,
type: BatchType[decoded.type],
}
return {
@@ -206,61 +222,21 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
},
}
interface SequencerBatchContext {
numSequencedTransactions: number
numSubsequentQueueTransactions: number
timestamp: number
blockNumber: number
}
const parseSequencerBatchContext = (
calldata: Buffer,
offset: number
): SequencerBatchContext => {
return {
numSequencedTransactions: BigNumber.from(
calldata.slice(offset, offset + 3)
).toNumber(),
numSubsequentQueueTransactions: BigNumber.from(
calldata.slice(offset + 3, offset + 6)
).toNumber(),
timestamp: BigNumber.from(
calldata.slice(offset + 6, offset + 11)
).toNumber(),
blockNumber: BigNumber.from(
calldata.slice(offset + 11, offset + 16)
).toNumber(),
}
}
const parseSequencerBatchTransaction = (
calldata: Buffer,
offset: number
): Buffer => {
const transactionLength = BigNumber.from(
calldata.slice(offset, offset + 3)
).toNumber()
return calldata.slice(offset + 3, offset + 3 + transactionLength)
}
const decodeSequencerBatchTransaction = (
transaction: Buffer,
const mapSequencerTransaction = (
tx: Transaction,
l2ChainId: number
): DecodedSequencerBatchTransaction => {
const decodedTx = ethers.utils.parseTransaction(transaction)
return {
nonce: BigNumber.from(decodedTx.nonce).toString(),
gasPrice: BigNumber.from(decodedTx.gasPrice).toString(),
gasLimit: BigNumber.from(decodedTx.gasLimit).toString(),
value: toRpcHexString(decodedTx.value),
target: decodedTx.to ? toHexString(decodedTx.to) : null,
data: toHexString(decodedTx.data),
nonce: BigNumber.from(tx.nonce).toString(),
gasPrice: BigNumber.from(tx.gasPrice).toString(),
gasLimit: BigNumber.from(tx.gasLimit).toString(),
value: toRpcHexString(tx.value),
target: tx.to ? toHexString(tx.to) : null,
data: toHexString(tx.data),
sig: {
v: parseSignatureVParam(decodedTx.v, l2ChainId),
r: toHexString(decodedTx.r),
s: toHexString(decodedTx.s),
v: parseSignatureVParam(tx.v, l2ChainId),
r: toHexString(tx.r),
s: toHexString(tx.s),
},
}
}
@@ -60,6 +60,7 @@ export const handleEventsStateBatchAppended: EventHandlerSet<
prevTotalElements: event.args._prevTotalElements.toNumber(),
extraData: event.args._extraData,
l1TransactionHash: extraData.l1TransactionHash,
type: 'LEGACY', // There is currently only 1 state root batch type
}
return {
......
@@ -48,6 +48,7 @@ interface BatchEntry {
prevTotalElements: number
extraData: string
l1TransactionHash: string
type: string
}
export type TransactionBatchEntry = BatchEntry
......
/* External Imports */
import fs from 'fs'
import path from 'path'
import chai = require('chai')
import Mocha from 'mocha'
import chaiAsPromised from 'chai-as-promised'
import { BigNumber } from 'ethers'
// Chai plugins go here.
chai.use(chaiAsPromised)
@@ -9,4 +12,38 @@ chai.use(chaiAsPromised)
const should = chai.should()
const expect = chai.expect
export { should, expect, Mocha }
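// Load the JSON mock batches from unit-tests/examples and rehydrate
// the fields that JSON serialization flattened (BigNumbers and the
// named event args).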
const readMockData = () => {
const mockDataPath = path.join(__dirname, 'unit-tests', 'examples')
const paths = fs.readdirSync(mockDataPath)
const files = []
for (const filename of paths) {
// Skip non .txt files
if (!filename.endsWith('.txt')) {
continue
}
const filePath = path.join(mockDataPath, filename)
const file = fs.readFileSync(filePath)
const obj = JSON.parse(file.toString())
// Rehydrate the serialized BigNumbers
obj.input.extraData.prevTotalElements = BigNumber.from(
obj.input.extraData.prevTotalElements
)
obj.input.extraData.batchIndex = BigNumber.from(
obj.input.extraData.batchIndex
)
if (obj.input.event.args.length !== 3) {
throw new Error(`ABI mismatch`)
}
obj.input.event.args = obj.input.event.args.map(BigNumber.from)
obj.input.event.args._startingQueueIndex = obj.input.event.args[0]
obj.input.event.args._numQueueElements = obj.input.event.args[1]
obj.input.event.args._totalElements = obj.input.event.args[2]
obj.input.extraData.batchSize = BigNumber.from(
obj.input.extraData.batchSize
)
files.push(obj)
}
return files
}
export { should, expect, Mocha, readMockData }
import { BigNumber, ethers } from 'ethers'
import { sequencerBatch, add0x, BatchType } from '@eth-optimism/core-utils'
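// Test helper: decode an uncompressed (legacy) batch, tag it as ZLIB,
// and re-encode it so the same mock data also exercises the
// compressed path.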
const compressBatchWithZlib = (calldata: string): string => {
const batch = sequencerBatch.decode(calldata)
batch.type = BatchType.ZLIB
const encoded = sequencerBatch.encode(batch)
return add0x(encoded)
}
/* Imports: Internal */
import { expect } from '../../../../setup'
import { expect, readMockData } from '../../../../setup'
import { handleEventsSequencerBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended'
import { SequencerBatchAppendedExtraData } from '../../../../../src/types'
describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () => {
const mockData = readMockData()
describe('handleEventsSequencerBatchAppended.parseEvent', () => {
// This tests the behavior of parsing a real mainnet transaction,
// so it will break if the encoding scheme changes.
@@ -46,9 +56,53 @@ describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', ()
expect(() => {
handleEventsSequencerBatchAppended.parseEvent(...input1)
}).to.throw(
`Block ${input1[1].blockNumber} transaction data is invalid for decoding: ${input1[1].l1TransactionData} , ` +
`converted buffer length is < 12.`
`Block ${input1[1].blockNumber} transaction data is too small: ${input1[1].l1TransactionData.length}`
)
})
describe('mainnet transactions', () => {
for (const mock of mockData) {
const { input, output } = mock
const { event, extraData, l2ChainId } = input
const hash = mock.input.extraData.l1TransactionHash
it(`uncompressed: ${hash}`, () => {
// Set the type to be legacy
output.transactionBatchEntry.type = BatchType[BatchType.LEGACY]
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
extraData,
l2ChainId
)
// Check all of the transaction entries individually
for (const [i, got] of res.transactionEntries.entries()) {
const expected = output.transactionEntries[i]
expect(got).to.deep.eq(expected, `case ${i}`)
}
expect(res).to.deep.eq(output)
})
it(`compressed: ${hash}`, () => {
// Set the type to be zlib
output.transactionBatchEntry.type = BatchType[BatchType.ZLIB]
const compressed = compressBatchWithZlib(
input.extraData.l1TransactionData
)
const copy = { ...extraData }
copy.l1TransactionData = compressed
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
copy,
l2ChainId
)
expect(res).to.deep.eq(output)
})
}
})
})
})
{
"extends": "../../tsconfig.json"
"extends": "../../tsconfig.json",
"typeRoots": ["node_modules/@types", "src/@types"]
}