Commit 3873b696 authored by Mark Tyneway

dtl: enable typed batch support

The data transport layer will now be able to index
typed batches. A typed batch sets the timestamp of its
first batch context to 0 and uses the block number as an
enum that determines the type of the batch.

A timestamp of 0 is impossible under realistic
conditions, so batches with a non-zero timestamp in the
first batch context are treated as legacy batches.

The first typed batch is type 0, where the block number is 0
and the transaction data is compressed with zlib.
parent 55d34935
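
For illustration, the marker scheme described above can be sketched against the legacy context layout (contexts begin at byte 15 of the calldata; within a 16-byte context, bytes 6-11 hold the timestamp and bytes 11-16 the block number, matching the parsing code removed in this commit). This is a hypothetical helper, not code from the commit:

import { BigNumber } from 'ethers'

// Hypothetical helper: classify a batch by inspecting its first batch
// context. Contexts start at byte 15 of the calldata; within a 16-byte
// context, bytes 6-11 are the timestamp and bytes 11-16 the block number.
const detectBatchType = (calldata: Buffer): number | 'LEGACY' => {
  const offset = 15
  const timestamp = BigNumber.from(
    calldata.slice(offset + 6, offset + 11)
  ).toNumber()
  const blockNumber = BigNumber.from(
    calldata.slice(offset + 11, offset + 16)
  ).toNumber()
  // A zero timestamp cannot occur in practice, so it marks a typed
  // batch whose type is carried in the block number field (0 = zlib).
  return timestamp === 0 ? blockNumber : 'LEGACY'
}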
---
'@eth-optimism/data-transport-layer': patch
---
Enable typed batch support
 /* Imports: External */
 import { LevelUp } from 'levelup'
 import { BigNumber } from 'ethers'
+import { BatchType } from '@eth-optimism/core-utils'

 /* Imports: Internal */
 import { SimpleDB } from './simple-db'
@@ -126,7 +127,14 @@ export class TransportDB {
   public async getTransactionBatchByIndex(
     index: number
   ): Promise<TransactionBatchEntry> {
-    return this._getEntryByIndex(TRANSPORT_DB_KEYS.TRANSACTION_BATCH, index)
+    const entry = (await this._getEntryByIndex(
+      TRANSPORT_DB_KEYS.TRANSACTION_BATCH,
+      index
+    )) as TransactionBatchEntry
+    if (entry && typeof entry.type === 'undefined') {
+      entry.type = BatchType[BatchType.LEGACY]
+    }
+    return entry
   }

   public async getStateRootByIndex(index: number): Promise<StateRootEntry> {
@@ -168,7 +176,13 @@ export class TransportDB {
   }

   public async getLatestTransactionBatch(): Promise<TransactionBatchEntry> {
-    return this._getLatestEntry(TRANSPORT_DB_KEYS.TRANSACTION_BATCH)
+    const entry = (await this._getLatestEntry(
+      TRANSPORT_DB_KEYS.TRANSACTION_BATCH
+    )) as TransactionBatchEntry
+    if (entry && typeof entry.type === 'undefined') {
+      entry.type = BatchType[BatchType.LEGACY]
+    }
+    return entry
   }

   public async getLatestStateRoot(): Promise<StateRootEntry> {
...
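
A note on the `BatchType[BatchType.LEGACY]` expression used above: it relies on the reverse mapping TypeScript generates for numeric enums. A minimal sketch, assuming `BatchType` is a numeric enum whose members include `LEGACY` and `ZLIB` (the member values here are assumptions, not taken from this diff):

// Assumed shape of the core-utils enum; the real definition may differ.
enum BatchType {
  LEGACY = -1,
  ZLIB = 0,
}

// Numeric enums are reverse-mapped, so indexing with a member value
// yields the member's name, which is what gets stored in the database.
const name: string = BatchType[BatchType.LEGACY] // 'LEGACY'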
 /* Imports: External */
 import { BigNumber, ethers, constants } from 'ethers'
+import { serialize, Transaction } from '@ethersproject/transactions'
 import { getContractFactory } from '@eth-optimism/contracts'
 import {
-  fromHexString,
   toHexString,
   toRpcHexString,
+  BatchType,
+  SequencerBatch,
 } from '@eth-optimism/core-utils'
 import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/CanonicalTransactionChain'
@@ -76,33 +78,33 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
   parseEvent: (event, extraData, l2ChainId) => {
     const transactionEntries: TransactionEntry[] = []

-    // It's easier to deal with this data if it's a Buffer.
-    const calldata = fromHexString(extraData.l1TransactionData)
-    if (calldata.length < 12) {
+    // 12 * 2 + 2 = 26
+    if (extraData.l1TransactionData.length < 26) {
       throw new Error(
-        `Block ${extraData.blockNumber} transaction data is invalid for decoding: ${extraData.l1TransactionData} , ` +
-          `converted buffer length is < 12.`
+        `Block ${extraData.blockNumber} transaction data is too small: ${extraData.l1TransactionData.length}`
       )
     }

-    const numContexts = BigNumber.from(calldata.slice(12, 15)).toNumber()
+    // TODO: typings not working?
+    const decoded = (SequencerBatch as any).fromHex(extraData.l1TransactionData)

+    // Keep track of the CTC index
     let transactionIndex = 0
+    // Keep track of the number of deposits
     let enqueuedCount = 0
-    let nextTxPointer = 15 + 16 * numContexts
-    for (let i = 0; i < numContexts; i++) {
-      const contextPointer = 15 + 16 * i
-      const context = parseSequencerBatchContext(calldata, contextPointer)
+    // Keep track of the tx index in the current batch
+    let index = 0

+    for (const context of decoded.contexts) {
       for (let j = 0; j < context.numSequencedTransactions; j++) {
-        const sequencerTransaction = parseSequencerBatchTransaction(
-          calldata,
-          nextTxPointer
-        )
+        const buf = decoded.transactions[index]
+        if (!buf) {
+          throw new Error(
+            `Invalid batch context, tx count: ${decoded.transactions.length}, attempting to parse ${index}`
+          )
+        }

-        const decoded = decodeSequencerBatchTransaction(
-          sequencerTransaction,
-          l2ChainId
-        )
+        const tx = buf.toTransaction()

         transactionEntries.push({
           index: extraData.prevTotalElements
@@ -114,16 +116,29 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
           gasLimit: BigNumber.from(0).toString(),
           target: constants.AddressZero,
           origin: null,
-          data: toHexString(sequencerTransaction),
+          data: serialize(
+            {
+              nonce: tx.nonce,
+              gasPrice: tx.gasPrice,
+              gasLimit: tx.gasLimit,
+              to: tx.to,
+              value: tx.value,
+              data: tx.data,
+            },
+            {
+              v: tx.v,
+              r: tx.r,
+              s: tx.s,
+            }
+          ),
           queueOrigin: 'sequencer',
-          value: decoded.value,
+          value: toRpcHexString(tx.value),
           queueIndex: null,
-          decoded,
+          decoded: mapSequencerTransaction(tx, l2ChainId),
           confirmed: true,
         })

-        nextTxPointer += 3 + sequencerTransaction.length
         transactionIndex++
+        index++
       }

       for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
@@ -169,6 +184,7 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
       timestamp: BigNumber.from(extraData.timestamp).toNumber(),
       submitter: extraData.submitter,
       l1TransactionHash: extraData.l1TransactionHash,
+      type: BatchType[decoded.type],
     }

     return {
@@ -206,61 +222,21 @@ export const handleEventsSequencerBatchAppended: EventHandlerSet<
   },
 }

-interface SequencerBatchContext {
-  numSequencedTransactions: number
-  numSubsequentQueueTransactions: number
-  timestamp: number
-  blockNumber: number
-}
-
-const parseSequencerBatchContext = (
-  calldata: Buffer,
-  offset: number
-): SequencerBatchContext => {
-  return {
-    numSequencedTransactions: BigNumber.from(
-      calldata.slice(offset, offset + 3)
-    ).toNumber(),
-    numSubsequentQueueTransactions: BigNumber.from(
-      calldata.slice(offset + 3, offset + 6)
-    ).toNumber(),
-    timestamp: BigNumber.from(
-      calldata.slice(offset + 6, offset + 11)
-    ).toNumber(),
-    blockNumber: BigNumber.from(
-      calldata.slice(offset + 11, offset + 16)
-    ).toNumber(),
-  }
-}
-
-const parseSequencerBatchTransaction = (
-  calldata: Buffer,
-  offset: number
-): Buffer => {
-  const transactionLength = BigNumber.from(
-    calldata.slice(offset, offset + 3)
-  ).toNumber()
-
-  return calldata.slice(offset + 3, offset + 3 + transactionLength)
-}
-
-const decodeSequencerBatchTransaction = (
-  transaction: Buffer,
+const mapSequencerTransaction = (
+  tx: Transaction,
   l2ChainId: number
 ): DecodedSequencerBatchTransaction => {
-  const decodedTx = ethers.utils.parseTransaction(transaction)
-
   return {
-    nonce: BigNumber.from(decodedTx.nonce).toString(),
-    gasPrice: BigNumber.from(decodedTx.gasPrice).toString(),
-    gasLimit: BigNumber.from(decodedTx.gasLimit).toString(),
-    value: toRpcHexString(decodedTx.value),
-    target: decodedTx.to ? toHexString(decodedTx.to) : null,
-    data: toHexString(decodedTx.data),
+    nonce: BigNumber.from(tx.nonce).toString(),
+    gasPrice: BigNumber.from(tx.gasPrice).toString(),
+    gasLimit: BigNumber.from(tx.gasLimit).toString(),
+    value: toRpcHexString(tx.value),
+    target: tx.to ? toHexString(tx.to) : null,
+    data: toHexString(tx.data),
     sig: {
-      v: parseSignatureVParam(decodedTx.v, l2ChainId),
-      r: toHexString(decodedTx.r),
-      s: toHexString(decodedTx.s),
+      v: parseSignatureVParam(tx.v, l2ChainId),
+      r: toHexString(tx.r),
+      s: toHexString(tx.s),
     },
   }
 }
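
The handler now re-serializes each decoded transaction with `serialize` from `@ethersproject/transactions` instead of storing the raw calldata slice. A minimal round-trip sketch of that call, using only fields shown above (`reserialize` is a hypothetical name):

import { parse, serialize, Transaction } from '@ethersproject/transactions'

// Re-encode a parsed legacy transaction from its fields plus signature,
// mirroring the serialize() call in the handler above.
const reserialize = (tx: Transaction): string =>
  serialize(
    {
      nonce: tx.nonce,
      gasPrice: tx.gasPrice,
      gasLimit: tx.gasLimit,
      to: tx.to,
      value: tx.value,
      data: tx.data,
    },
    { v: tx.v, r: tx.r, s: tx.s }
  )

// For a well-formed raw legacy transaction `raw`,
// reserialize(parse(raw)) should equal raw.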
@@ -60,6 +60,7 @@ export const handleEventsStateBatchAppended: EventHandlerSet<
       prevTotalElements: event.args._prevTotalElements.toNumber(),
       extraData: event.args._extraData,
       l1TransactionHash: extraData.l1TransactionHash,
+      type: 'LEGACY', // There is currently only 1 state root batch type
     }

     return {
...
@@ -48,6 +48,7 @@ interface BatchEntry {
   prevTotalElements: number
   extraData: string
   l1TransactionHash: string
+  type: string
 }

 export type TransactionBatchEntry = BatchEntry
...
-/* External Imports */
+import fs from 'fs'
+import path from 'path'
+
 import chai = require('chai')
 import Mocha from 'mocha'
 import chaiAsPromised from 'chai-as-promised'
+import { BigNumber } from 'ethers'

 // Chai plugins go here.
 chai.use(chaiAsPromised)
@@ -9,4 +12,38 @@ chai.use(chaiAsPromised)
 const should = chai.should()
 const expect = chai.expect

-export { should, expect, Mocha }
+const readMockData = () => {
+  const mockDataPath = path.join(__dirname, 'unit-tests', 'examples')
+  const paths = fs.readdirSync(mockDataPath)
+  const files = []
+  for (const filename of paths) {
+    // Skip non .txt files
+    if (!filename.endsWith('.txt')) {
+      continue
+    }
+    const filePath = path.join(mockDataPath, filename)
+    const file = fs.readFileSync(filePath)
+    const obj = JSON.parse(file.toString())
+    // Reserialize the BigNumbers
+    obj.input.extraData.prevTotalElements = BigNumber.from(
+      obj.input.extraData.prevTotalElements
+    )
+    obj.input.extraData.batchIndex = BigNumber.from(
+      obj.input.extraData.batchIndex
+    )
+    if (obj.input.event.args.length !== 3) {
+      throw new Error(`ABI mismatch`)
+    }
+    obj.input.event.args = obj.input.event.args.map(BigNumber.from)
+    obj.input.event.args._startingQueueIndex = obj.input.event.args[0]
+    obj.input.event.args._numQueueElements = obj.input.event.args[1]
+    obj.input.event.args._totalElements = obj.input.event.args[2]
+    obj.input.extraData.batchSize = BigNumber.from(
+      obj.input.extraData.batchSize
+    )
+    files.push(obj)
+  }
+  return files
+}
+
+export { should, expect, Mocha, readMockData }
 import { BigNumber, ethers } from 'ethers'
+import { sequencerBatch, add0x, BatchType } from '@eth-optimism/core-utils'
+
+const compressBatchWithZlib = (calldata: string): string => {
+  const batch = sequencerBatch.decode(calldata)
+  batch.type = BatchType.ZLIB
+  const encoded = sequencerBatch.encode(batch)
+  return add0x(encoded)
+}

 /* Imports: Internal */
-import { expect } from '../../../../setup'
+import { expect, readMockData } from '../../../../setup'
 import { handleEventsSequencerBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended'
 import { SequencerBatchAppendedExtraData } from '../../../../../src/types'

 describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () => {
+  const mockData = readMockData()
+
   describe('handleEventsSequencerBatchAppended.parseEvent', () => {
     // This tests the behavior of parsing a real mainnet transaction,
     // so it will break if the encoding scheme changes.
@@ -46,9 +56,53 @@ describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', ()
       expect(() => {
         handleEventsSequencerBatchAppended.parseEvent(...input1)
       }).to.throw(
-        `Block ${input1[1].blockNumber} transaction data is invalid for decoding: ${input1[1].l1TransactionData} , ` +
-          `converted buffer length is < 12.`
+        `Block ${input1[1].blockNumber} transaction data is too small: ${input1[1].l1TransactionData.length}`
       )
     })
+
+    describe('mainnet transactions', () => {
+      for (const mock of mockData) {
+        const { input, output } = mock
+        const { event, extraData, l2ChainId } = input
+        const hash = mock.input.extraData.l1TransactionHash
+
+        it(`uncompressed: ${hash}`, () => {
+          // Set the type to be legacy
+          output.transactionBatchEntry.type = BatchType[BatchType.LEGACY]
+          const res = handleEventsSequencerBatchAppended.parseEvent(
+            event,
+            extraData,
+            l2ChainId
+          )
+          // Check all of the transaction entries individually
+          for (const [i, got] of res.transactionEntries.entries()) {
+            const expected = output.transactionEntries[i]
+            expect(got).to.deep.eq(expected, `case ${i}`)
+          }
+          expect(res).to.deep.eq(output)
+        })
+
+        it(`compressed: ${hash}`, () => {
+          // Set the type to be zlib
+          output.transactionBatchEntry.type = BatchType[BatchType.ZLIB]
+          const compressed = compressBatchWithZlib(
+            input.extraData.l1TransactionData
+          )
+          const copy = { ...extraData }
+          copy.l1TransactionData = compressed
+          const res = handleEventsSequencerBatchAppended.parseEvent(
+            event,
+            copy,
+            l2ChainId
+          )
+          expect(res).to.deep.eq(output)
+        })
+      }
+    })
   })
 })
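
The `compressBatchWithZlib` helper above re-encodes a legacy batch as a ZLIB-typed batch via `sequencerBatch.encode`. Per the commit message, type 0 means the transaction data is compressed with zlib; a sketch of that primitive using Node's built-in zlib (the encoder's exact framing of the payload is an assumption, not shown in this diff):

import * as zlib from 'zlib'

// Round-trip the deflate compression implied by BatchType.ZLIB.
// The sample payload is arbitrary.
const payload = Buffer.from('0000040011223344', 'hex')
const compressed = zlib.deflateSync(payload)
const restored = zlib.inflateSync(compressed)
console.log(restored.equals(payload)) // true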
 {
-  "extends": "../../tsconfig.json"
+  "extends": "../../tsconfig.json",
+  "typeRoots": ["node_modules/@types", "src/@types"]
 }