Commit 8f5982d2 authored by smartcontracts's avatar smartcontracts Committed by GitHub

pkg: Add DTL (#11)

* pkg: Add DTL

* fix: use correct tsconfig when building
Co-authored-by: default avatarGeorgios Konstantopoulos <me@gakonst.com>
parent 46ec5bc3
node_modules node_modules
dist **/dist
results results
.nyc_output .nyc_output
*.tsbuildinfo *.tsbuildinfo
# General options
DATA_TRANSPORT_LAYER__DB_PATH=./db
DATA_TRANSPORT_LAYER__ADDRESS_MANAGER=
DATA_TRANSPORT_LAYER__POLLING_INTERVAL=5000
DATA_TRANSPORT_LAYER__DANGEROUSLY_CATCH_ALL_ERRORS=true
DATA_TRANSPORT_LAYER__CONFIRMATIONS=12
# Server options
DATA_TRANSPORT_LAYER__SERVER_HOSTNAME=localhost
DATA_TRANSPORT_LAYER__SERVER_PORT=7878
# Set to "true" if you want to sync confirmed transactions from L1 (Ethereum).
# You probably want to set this to "true".
DATA_TRANSPORT_LAYER__SYNC_FROM_L1=true
DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT=
DATA_TRANSPORT_LAYER__LOGS_PER_POLLING_INTERVAL=2000
# Set to "true" if you want to sync unconfirmed transactions from a sequencer.
# Make sure to fill in the below values if you intend to do so.
DATA_TRANSPORT_LAYER__SYNC_FROM_L2=false
DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT=
DATA_TRANSPORT_LAYER__TRANSACTIONS_PER_POLLING_INTERVAL=1000
DATA_TRANSPORT_LAYER__L2_CHAIN_ID=69
DATA_TRANSPORT_LAYER__LEGACY_SEQUENCER_COMPATIBILITY=false
/db/
node_modules/
yarn-error.log
.env
test/temp/
build/
\ No newline at end of file
# data transport layer
## v0.1.2
- Fix bug in L2 sync
## v0.1.1
- Prioritize L2 synced API requests
- Stop syncing L2 at a certain height
## v0.1.0
- Sync From L1
- Sync From L2
- Initial Release
(The MIT License)
Copyright 2020-2021 Optimism
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# @eth-optimism/data-transport-layer
## What is this?
The Optimistic Ethereum Data Transport Layer is a long-running software service (written in TypeScript) designed to reliably index Optimistic Ethereum transaction data from Layer 1 (Ethereum). Specifically, this service indexes:
* Transactions that have been enqueued for submission to the CanonicalTransactionChain via [`enqueue`](https://github.com/ethereum-optimism/contracts-v2/blob/13b7deef60f773241723ea874fc6e81b4003b164/contracts/optimistic-ethereum/OVM/chain/OVM_CanonicalTransactionChain.sol#L225-L231).
* Transactions that have been included in the CanonicalTransactionChain via [`appendQueueBatch`](https://github.com/ethereum-optimism/contracts-v2/blob/13b7deef60f773241723ea874fc6e81b4003b164/contracts/optimistic-ethereum/OVM/chain/OVM_CanonicalTransactionChain.sol#L302-L306) or [`appendSequencerBatch`](https://github.com/ethereum-optimism/contracts-v2/blob/13b7deef60f773241723ea874fc6e81b4003b164/contracts/optimistic-ethereum/OVM/chain/OVM_CanonicalTransactionChain.sol#L352-L354).
* State roots (transaction results) that have been published to the StateCommitmentChain via [`appendStateBatch`](https://github.com/ethereum-optimism/contracts-v2/blob/13b7deef60f773241723ea874fc6e81b4003b164/contracts/optimistic-ethereum/OVM/chain/OVM_StateCommitmentChain.sol#L127-L132).
## How does it work?
We run two sub-services, the [`L1IngestionService`](./src/services/l1-ingestion/service.ts) and the [`L1TransportServer`](./src/services/server/service.ts). The `L1IngestionService` is responsible for querying for the various events and transaction data necessary to accurately index information from our Layer 1 (Ethereum) smart contracts. The `L1TransportServer` simply provides an API for accessing this information.
## Getting started
### Configuration
See an example config at [.env.example](.env.example); copy into a `.env` file before running.
`DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT` can be the JSON RPC endpoint of any L1 Ethereum node. `DATA_TRANSPORT_LAYER__ADDRESS_MANAGER` should be the contract address of the Address Manager on the corresponding network; find their values in the [Regenesis repo](https://github.com/ethereum-optimism/regenesis).
### Building and usage
After cloning and switching to the repository, install dependencies:
```bash
$ yarn
```
Use the following commands to build, use, test, and lint:
```bash
$ yarn build
$ yarn start
$ yarn test
$ yarn lint
```
## Configuration
We're using `dotenv` for our configuration.
Copy `.env.example` into `.env`, feel free to modify it.
Here's the list of environment variables you can change:
| Variable | Default | Description |
| ------------------------------------------------------- | --------- | ------------ |
| DATA_TRANSPORT_LAYER__DB_PATH | ./db | Path to the database for this service. |
| DATA_TRANSPORT_LAYER__ADDRESS_MANAGER | - | Address of the AddressManager contract on L1. See [regenesis](https://github.com/ethereum-optimism/regenesis) repo to find this address for mainnet or kovan. |
| DATA_TRANSPORT_LAYER__POLLING_INTERVAL | 5000 | Period of time between execution loops. |
| DATA_TRANSPORT_LAYER__DANGEROUSLY_CATCH_ALL_ERRORS | false | If true, will catch all errors without throwing. |
| DATA_TRANSPORT_LAYER__CONFIRMATIONS | 12 | Number of confirmations to wait before accepting transactions as "canonical". |
| DATA_TRANSPORT_LAYER__SERVER_HOSTNAME | localhost | Host to run the API on. |
| DATA_TRANSPORT_LAYER__SERVER_PORT | 7878 | Port to run the API on. |
| DATA_TRANSPORT_LAYER__SYNC_FROM_L1 | true | Whether or not to sync from L1. |
| DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT | - | RPC endpoint for an L1 node. |
| DATA_TRANSPORT_LAYER__LOGS_PER_POLLING_INTERVAL | 2000 | Logs to sync per polling interval. |
| DATA_TRANSPORT_LAYER__SYNC_FROM_L2 | false | Whether or not to sync from L2. |
| DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT | - | RPC endpoint for an L2 node. |
| DATA_TRANSPORT_LAYER__TRANSACTIONS_PER_POLLING_INTERVAL | 1000 | Number of L2 transactions to query per polling interval. |
| DATA_TRANSPORT_LAYER__L2_CHAIN_ID | - | L2 chain ID. |
| DATA_TRANSPORT_LAYER__LEGACY_SEQUENCER_COMPATIBILITY | false | Whether or not to enable "legacy" sequencer sync (without the custom `eth_getBlockRange` endpoint) |
## HTTP API
This section describes the HTTP API for accessing indexed Layer 1 data.
### Latest Ethereum Block Context
#### Request
```
GET /eth/context/latest
```
#### Response
```ts
{
"blockNumber": number,
"timestamp": number
}
```
### Enqueue by Index
#### Request
```
GET /enqueue/index/{index: number}
```
#### Response
```ts
{
"index": number,
"target": string,
"data": string,
"gasLimit": number,
"origin": string,
"blockNumber": number,
"timestamp": number
}
```
### Transaction by Index
#### Request
```
GET /transaction/index/{index: number}
```
#### Response
```ts
{
"transaction": {
"index": number,
"batchIndex": number,
"data": string,
"blockNumber": number,
"timestamp": number,
"gasLimit": number,
"target": string,
"origin": string,
"queueOrigin": string,
"type": string | null,
"decoded": {
"sig": {
"r": string,
"s": string,
"v": string
},
"gasLimit": number,
"gasPrice": number,
"nonce": number,
"target": string,
"data": string
} | null,
"queueIndex": number | null,
},
"batch": {
"index": number,
"blockNumber": number,
"timestamp": number,
"submitter": string,
"size": number,
"root": string,
"prevTotalElements": number,
"extraData": string
}
}
```
### Transaction Batch by Index
#### Request
```
GET /batch/transaction/index/{index: number}
```
#### Response
```ts
{
"batch": {
"index": number,
"blockNumber": number,
"timestamp": number,
"submitter": string,
"size": number,
"root": string,
"prevTotalElements": number,
"extraData": string
},
"transactions": [
{
"index": number,
"batchIndex": number,
"data": string,
"blockNumber": number,
"timestamp": number,
"gasLimit": number,
"target": string,
"origin": string,
"queueOrigin": string,
"type": string | null,
"decoded": {
"sig": {
"r": string,
"s": string,
"v": string
},
"gasLimit": number,
"gasPrice": number,
"nonce": number,
"target": string,
"data": string
} | null,
"queueIndex": number | null,
}
]
}
```
### State Root by Index
#### Request
```
GET /stateroot/index/{index: number}
```
#### Response
```ts
{
"stateRoot": {
"index": number,
"batchIndex": number,
"value": string
},
"batch": {
"index": number,
"blockNumber": number,
"timestamp": number,
"submitter": string,
"size": number,
"root": string,
"prevTotalElements": number,
"extraData": string
},
}
```
### State Root Batch by Index
#### Request
```
GET /batch/stateroot/index/{index: number}
```
#### Response
```ts
{
"batch": {
"index": number,
"blockNumber": number,
"timestamp": number,
"submitter": string,
"size": number,
"root": string,
"prevTotalElements": number,
"extraData": string
},
"stateRoots": [
{
"index": number,
"batchIndex": number,
"value": string
}
]
}
```
This diff is collapsed.
{
"name": "@eth-optimism/data-transport-layer",
"version": "0.1.2",
"main": "build/index.js",
"license": "MIT",
"files": [
"build/**/*.js",
"build/**/*.js.map",
"build/**/*.ts"
],
"types": "build/index.d.ts",
"scripts": {
"clean": "rimraf ./build",
"clean:db": "rimraf ./db",
"lint": "yarn run lint:fix && yarn run lint:check",
"lint:check": "tslint --format stylish --project .",
"lint:fix": "prettier --config prettier-config.json --write \"{src,exec,test}/**/*.ts\"",
"start": "ts-node ./src/services/run.ts",
"start:local": "ts-node ./src/services/run.ts | pino-pretty",
"test": "hardhat --config test/config/hardhat.config.ts test",
"build": "tsc -p tsconfig.build.json"
},
"dependencies": {
"@eth-optimism/contracts": "^0.1.6",
"@eth-optimism/core-utils": "^0.1.10",
"@eth-optimism/service-base": "^1.1.5",
"@ethersproject/providers": "^5.0.21",
"@types/express": "^4.17.11",
"bcfg": "^0.1.6",
"browser-or-node": "^1.3.0",
"colors": "^1.4.0",
"cors": "^2.8.5",
"dotenv": "^8.2.0",
"ethers": "^5.0.26",
"express": "^4.17.1",
"level": "^6.0.1",
"levelup": "^4.4.0",
"node-fetch": "^2.6.1"
},
"devDependencies": {
"@eth-optimism/dev": "^1.1.1",
"@nomiclabs/hardhat-ethers": "^2.0.1",
"@types/browser-or-node": "^1.3.0",
"@types/cors": "^2.8.9",
"@types/levelup": "^4.3.0",
"@types/node-fetch": "^2.5.8",
"@types/rimraf": "^3.0.0",
"chai": "^4.3.4",
"chai-as-promised": "^7.1.1",
"hardhat": "^2.0.9",
"mocha": "^8.3.2",
"pino-pretty": "^4.7.1",
"rimraf": "^3.0.2",
"ts-node": "^9.1.1",
"typescript": "^4.2.3"
}
}
../../prettier-config.json
\ No newline at end of file
// Only load if not in browser.
import { isNode } from 'browser-or-node'
declare var window: any
/* tslint:disable-next-line:no-var-requires */
const fetch = isNode ? require('node-fetch') : window.fetch
import {
EnqueueResponse,
StateRootBatchResponse,
StateRootResponse,
SyncingResponse,
TransactionBatchResponse,
TransactionResponse,
} from '../types'
/**
 * Simple HTTP client for querying the L1 data transport layer server.
 * Each method hits the corresponding REST endpoint and returns the parsed
 * JSON response body.
 */
export class L1DataTransportClient {
  /**
   * @param url Base URL of the data transport layer server.
   */
  constructor(private url: string) {}

  /** Gets the current sync status of the service. */
  public async syncing(): Promise<SyncingResponse> {
    return this._get(`/eth/syncing`)
  }

  /** Gets an enqueued (L1 => L2) transaction by its queue index. */
  public async getEnqueueByIndex(index: number): Promise<EnqueueResponse> {
    return this._get(`/enqueue/index/${index}`)
  }

  /** Gets the most recently indexed enqueued transaction. */
  public async getLatestEnqueue(): Promise<EnqueueResponse> {
    return this._get(`/enqueue/latest`)
  }

  /** Gets an indexed transaction by its index. */
  public async getTransactionByIndex(
    index: number
  ): Promise<TransactionResponse> {
    return this._get(`/transaction/index/${index}`)
  }

  /** Gets the most recently indexed transaction. */
  public async getLatestTransaction(): Promise<TransactionResponse> {
    return this._get(`/transaction/latest`)
  }

  /**
   * Gets the most recently indexed transaction.
   * @deprecated Misspelled method name kept for backwards compatibility;
   * use {@link getLatestTransaction} instead.
   */
  public async getLatestTransacton(): Promise<TransactionResponse> {
    return this.getLatestTransaction()
  }

  /** Gets a transaction batch by its batch index. */
  public async getTransactionBatchByIndex(
    index: number
  ): Promise<TransactionBatchResponse> {
    return this._get(`/batch/transaction/index/${index}`)
  }

  /** Gets the most recently indexed transaction batch. */
  public async getLatestTransactionBatch(): Promise<TransactionBatchResponse> {
    return this._get(`/batch/transaction/latest`)
  }

  /** Gets a state root by its index. */
  public async getStateRootByIndex(index: number): Promise<StateRootResponse> {
    return this._get(`/stateroot/index/${index}`)
  }

  /** Gets the most recently indexed state root. */
  public async getLatestStateRoot(): Promise<StateRootResponse> {
    return this._get(`/stateroot/latest`)
  }

  /** Gets a state root batch by its batch index. */
  public async getStateRootBatchByIndex(
    index: number
  ): Promise<StateRootBatchResponse> {
    return this._get(`/batch/stateroot/index/${index}`)
  }

  /** Gets the most recently indexed state root batch. */
  public async getLatestStateRootBatch(): Promise<StateRootBatchResponse> {
    return this._get(`/batch/stateroot/latest`)
  }

  /**
   * Performs a GET against the given endpoint and parses the JSON body.
   * NOTE(review): does not check `response.ok`, so HTTP errors surface as
   * JSON parse failures rather than useful errors — worth fixing upstream.
   */
  private async _get<TResponse>(endpoint: string): Promise<TResponse> {
    return (await fetch(`${this.url}${endpoint}`)).json()
  }
}
/* Imports: External */
import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers'
/**
 * Thin JSON-over-LevelDB key/value store. Values are serialized with
 * JSON.stringify and keyed by a (namespace, index) pair so that contiguous
 * ranges of indexed entries can be streamed back out in sorted order.
 */
export class SimpleDB {
  constructor(public db: LevelUp) {}

  /**
   * Retrieves and parses a single entry.
   * @param key Namespace of the entry.
   * @param index Index of the entry within the namespace.
   * @returns The parsed entry, or null if missing or unparseable.
   */
  public async get<TEntry>(key: string, index: number): Promise<TEntry | null> {
    try {
      // TODO: Better checks here.
      return JSON.parse(await this.db.get(this._makeKey(key, index)))
    } catch (err) {
      return null
    }
  }

  /**
   * Streams out all entries with indices in [startIndex, endIndex).
   * @param key Namespace of the entries.
   * @param startIndex First index to include.
   * @param endIndex First index to exclude.
   * @returns Entries in key order; an empty array if the stream errors.
   */
  public async range<TEntry>(
    key: string,
    startIndex: number,
    endIndex: number
  ): Promise<TEntry[]> {
    try {
      return new Promise<TEntry[]>((resolve) => {
        const entries: TEntry[] = []
        this.db
          .createValueStream({
            gte: this._makeKey(key, startIndex),
            lt: this._makeKey(key, endIndex),
          })
          .on('data', (transaction: string) => {
            entries.push(JSON.parse(transaction))
          })
          .on('error', (err: any) => {
            // BUG FIX: previously resolved `null`, which contradicts the
            // declared array return type and crashes callers that iterate
            // the result. Resolve an empty array instead, matching the
            // synchronous-failure path below.
            resolve([])
          })
          .on('close', () => {
            // TODO: Close vs end? Need to double check later.
            // Extra resolve() calls after the first are no-ops, so settling
            // on both 'close' and 'end' is harmless.
            resolve(entries)
          })
          .on('end', () => {
            resolve(entries)
          })
      })
    } catch (err) {
      // Only reachable if stream construction throws synchronously.
      return []
    }
  }

  /**
   * Writes a batch of entries in a single atomic LevelDB batch.
   */
  public async put<TEntry>(
    entries: {
      key: string
      index: number
      value: TEntry
    }[]
  ): Promise<void> {
    return this.db.batch(
      entries.map((entry) => {
        return {
          type: 'put',
          key: this._makeKey(entry.key, entry.index),
          value: JSON.stringify(entry.value),
        }
      })
    )
  }

  /**
   * Builds a lexicographically sortable key of the form `<name>:<index>`,
   * zero-padding the index to 32 digits so string ordering matches numeric
   * ordering (required for correct range queries).
   */
  private _makeKey(key: string, index: number): string {
    return `${key}:${BigNumber.from(index).toString().padStart(32, '0')}`
  }
}
This diff is collapsed.
export * from './client/client'
export * from './types'
/* Imports: External */
import { BigNumber, ethers, constants } from 'ethers'
import { getContractFactory } from '@eth-optimism/contracts'
import {
ctcCoder,
fromHexString,
toHexString,
TxType,
} from '@eth-optimism/core-utils'
/* Imports: Internal */
import {
DecodedSequencerBatchTransaction,
EventArgsSequencerBatchAppended,
TransactionBatchEntry,
TransactionEntry,
EventHandlerSet,
} from '../../../types'
import {
SEQUENCER_ENTRYPOINT_ADDRESS,
SEQUENCER_GAS_LIMIT,
} from '../../../utils'
/**
 * Extra data gathered from L1 that is needed to parse a
 * SequencerBatchAppended event (see getExtraData below for how each field
 * is populated).
 */
export interface SequencerBatchAppendedExtraData {
  // Timestamp of the L1 block containing the batch submission.
  timestamp: number
  // Number of the L1 block containing the batch submission.
  blockNumber: number
  // Sender of the L1 batch submission transaction.
  submitter: string
  // Raw calldata of the L1 batch submission transaction (holds the batch).
  l1TransactionData: string
  // Hash of the L1 batch submission transaction.
  l1TransactionHash: string
  // Gas limit recorded for the batch's transactions.
  gasLimit: number

  // Stuff from TransactionBatchAppended (the sibling event emitted just
  // before SequencerBatchAppended).
  prevTotalElements: BigNumber
  batchIndex: BigNumber
  batchSize: BigNumber
  batchRoot: string
  batchExtraData: string
}

/**
 * Result of parsing a SequencerBatchAppended event: the batch header plus
 * one entry for every transaction contained in the batch.
 */
export interface SequencerBatchAppendedParsedEvent {
  transactionBatchEntry: TransactionBatchEntry
  transactionEntries: TransactionEntry[]
}
/**
 * Handler set for SequencerBatchAppended events: fetches the L1 context,
 * decodes the batch calldata into transaction entries, and persists them.
 */
export const handleEventsSequencerBatchAppended: EventHandlerSet<
  EventArgsSequencerBatchAppended,
  SequencerBatchAppendedExtraData,
  SequencerBatchAppendedParsedEvent
> = {
  /**
   * Collects everything needed to parse the batch: the submitting L1
   * transaction (whose calldata holds the batch itself), its block, and the
   * sibling TransactionBatchAppended event that carries the batch header.
   */
  getExtraData: async (event, l1RpcProvider) => {
    const l1Transaction = await event.getTransaction()
    const eventBlock = await event.getBlock()

    // TODO: We need to update our events so that we actually have enough information to parse this
    // batch without having to pull out this extra event. For the meantime, we need to find this
    // "TransactionBatchAppended" event to get the rest of the data.
    const OVM_CanonicalTransactionChain = getContractFactory(
      'OVM_CanonicalTransactionChain'
    )
      .attach(event.address)
      .connect(l1RpcProvider)

    const batchSubmissionEvent = (
      await OVM_CanonicalTransactionChain.queryFilter(
        OVM_CanonicalTransactionChain.filters.TransactionBatchAppended(),
        eventBlock.number,
        eventBlock.number
      )
    ).find((foundEvent: ethers.Event) => {
      // We might have more than one event in this block, so we specifically want to find a
      // "TransactionBatchAppended" event emitted immediately before the event in question.
      return (
        foundEvent.transactionHash === event.transactionHash &&
        foundEvent.logIndex === event.logIndex - 1
      )
    })

    if (!batchSubmissionEvent) {
      throw new Error(
        `Well, this really shouldn't happen. A SequencerBatchAppended event doesn't have a corresponding TransactionBatchAppended event.`
      )
    }

    return {
      timestamp: eventBlock.timestamp,
      blockNumber: eventBlock.number,
      submitter: l1Transaction.from,
      l1TransactionHash: l1Transaction.hash,
      l1TransactionData: l1Transaction.data,
      gasLimit: SEQUENCER_GAS_LIMIT,

      // Batch header fields from the TransactionBatchAppended event.
      prevTotalElements: batchSubmissionEvent.args._prevTotalElements,
      batchIndex: batchSubmissionEvent.args._batchIndex,
      batchSize: batchSubmissionEvent.args._batchSize,
      batchRoot: batchSubmissionEvent.args._batchRoot,
      batchExtraData: batchSubmissionEvent.args._extraData,
    }
  },

  /**
   * Decodes the batch calldata into one TransactionEntry per transaction
   * (both sequenced and queued) plus a TransactionBatchEntry header.
   */
  parseEvent: (event, extraData) => {
    const transactionEntries: TransactionEntry[] = []

    // It's easier to deal with this data if it's a Buffer.
    const calldata = fromHexString(extraData.l1TransactionData)

    if (calldata.length < 12) {
      throw new Error(
        `Block ${extraData.blockNumber} transaction data is invalid for decoding: ${extraData.l1TransactionData} , ` +
          `converted buffer length is < 12.`
      )
    }

    // 3-byte big-endian count of batch contexts, read from bytes [12, 15).
    // The fixed 15-byte header layout is implied by the offsets below —
    // TODO confirm against the contract's appendSequencerBatch encoding.
    const numContexts = BigNumber.from(calldata.slice(12, 15)).toNumber()
    let transactionIndex = 0
    let enqueuedCount = 0
    // Contexts are 16 bytes each, packed directly after the count; the
    // variable-length transaction payloads follow the last context.
    let nextTxPointer = 15 + 16 * numContexts
    for (let i = 0; i < numContexts; i++) {
      const contextPointer = 15 + 16 * i
      const context = parseSequencerBatchContext(calldata, contextPointer)

      // Sequenced transactions: payloads are embedded in the calldata and
      // stamped with this context's timestamp/blockNumber.
      for (let j = 0; j < context.numSequencedTransactions; j++) {
        const sequencerTransaction = parseSequencerBatchTransaction(
          calldata,
          nextTxPointer
        )

        const { decoded, type } = maybeDecodeSequencerBatchTransaction(
          sequencerTransaction
        )

        transactionEntries.push({
          index: extraData.prevTotalElements
            .add(BigNumber.from(transactionIndex))
            .toNumber(),
          batchIndex: extraData.batchIndex.toNumber(),
          blockNumber: BigNumber.from(context.blockNumber).toNumber(),
          timestamp: BigNumber.from(context.timestamp).toNumber(),
          gasLimit: BigNumber.from(extraData.gasLimit).toNumber(),
          target: SEQUENCER_ENTRYPOINT_ADDRESS,
          origin: null,
          data: toHexString(sequencerTransaction),
          queueOrigin: 'sequencer',
          type,
          queueIndex: null,
          decoded,
          confirmed: true,
        })

        // Advance past the 3-byte length prefix plus the payload.
        nextTxPointer += 3 + sequencerTransaction.length
        transactionIndex++
      }

      // Queued (L1-originated) transactions: only the queue index is known
      // here; the rest of the data lives with the Enqueue event.
      for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
        const queueIndex = event.args._startingQueueIndex.add(
          BigNumber.from(enqueuedCount)
        )

        // Okay, so. Since events are processed in parallel, we don't know if the Enqueue
        // event associated with this queue element has already been processed. So we'll ask
        // the api to fetch that data for itself later on and we use fake values for some
        // fields. The real TODO here is to make sure we fix this data structure to avoid ugly
        // "dummy" fields.
        transactionEntries.push({
          index: extraData.prevTotalElements
            .add(BigNumber.from(transactionIndex))
            .toNumber(),
          batchIndex: extraData.batchIndex.toNumber(),
          blockNumber: BigNumber.from(0).toNumber(),
          timestamp: BigNumber.from(0).toNumber(),
          gasLimit: BigNumber.from(0).toNumber(),
          target: constants.AddressZero,
          origin: constants.AddressZero,
          data: '0x',
          queueOrigin: 'l1',
          type: 'EIP155',
          queueIndex: queueIndex.toNumber(),
          decoded: null,
          confirmed: true,
        })

        enqueuedCount++
        transactionIndex++
      }
    }

    // Using .toNumber() throughout; see the note in the state batch handler
    // about eventually moving everything to BigNumber + hex.
    const transactionBatchEntry: TransactionBatchEntry = {
      index: extraData.batchIndex.toNumber(),
      root: extraData.batchRoot,
      size: extraData.batchSize.toNumber(),
      prevTotalElements: extraData.prevTotalElements.toNumber(),
      extraData: extraData.batchExtraData,
      blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
      timestamp: BigNumber.from(extraData.timestamp).toNumber(),
      submitter: extraData.submitter,
      l1TransactionHash: extraData.l1TransactionHash,
    }

    return {
      transactionBatchEntry,
      transactionEntries,
    }
  },

  /**
   * Persists the batch header and its transaction entries.
   */
  storeEvent: async (entry, db) => {
    await db.putTransactionBatchEntries([entry.transactionBatchEntry])
    await db.putTransactionEntries(entry.transactionEntries)

    // Add an additional field to the enqueued transactions in the database
    // if they have already been confirmed
    for (const transactionEntry of entry.transactionEntries) {
      if (transactionEntry.queueOrigin === 'l1') {
        await db.putTransactionIndexByQueueIndex(
          transactionEntry.queueIndex,
          transactionEntry.index
        )
      }
    }
  },
}
/**
 * A single 16-byte context from a sequencer batch: how many sequenced and
 * queued transactions it covers, and the (timestamp, blockNumber) pair
 * assigned to the sequenced transactions.
 */
interface SequencerBatchContext {
  numSequencedTransactions: number
  numSubsequentQueueTransactions: number
  timestamp: number
  blockNumber: number
}
/**
 * Parses one 16-byte batch context out of calldata at the given offset.
 * Field layout (big-endian, relative to offset): 3 bytes
 * numSequencedTransactions, 3 bytes numSubsequentQueueTransactions,
 * 5 bytes timestamp, 5 bytes blockNumber.
 */
const parseSequencerBatchContext = (
  calldata: Buffer,
  offset: number
): SequencerBatchContext => {
  // Reads the big-endian unsigned integer in calldata[offset+start, offset+end).
  const readField = (start: number, end: number): number => {
    return BigNumber.from(calldata.slice(offset + start, offset + end)).toNumber()
  }

  return {
    numSequencedTransactions: readField(0, 3),
    numSubsequentQueueTransactions: readField(3, 6),
    timestamp: readField(6, 11),
    blockNumber: readField(11, 16),
  }
}
/**
 * Extracts a single transaction payload from batch calldata. The 3 bytes
 * at `offset` are a big-endian length prefix; the payload follows.
 */
const parseSequencerBatchTransaction = (
  calldata: Buffer,
  offset: number
): Buffer => {
  const payloadStart = offset + 3
  const payloadLength = BigNumber.from(
    calldata.slice(offset, payloadStart)
  ).toNumber()
  return calldata.slice(payloadStart, payloadStart + payloadLength)
}
const maybeDecodeSequencerBatchTransaction = (
transaction: Buffer
): {
decoded: DecodedSequencerBatchTransaction | null
type: 'EIP155' | 'ETH_SIGN' | null
} => {
let decoded = null
let type = null
try {
const txType = transaction.slice(0, 1).readUInt8()
if (txType === TxType.EIP155) {
type = 'EIP155'
decoded = ctcCoder.eip155TxData.decode(transaction.toString('hex'))
} else if (txType === TxType.EthSign) {
type = 'ETH_SIGN'
decoded = ctcCoder.ethSignTxData.decode(transaction.toString('hex'))
} else {
throw new Error(`Unknown sequencer transaction type.`)
}
// Validate the transaction
if (!validateBatchTransaction(type, decoded)) {
decoded = null
}
} catch (err) {
// Do nothing
}
return {
decoded,
type,
}
}
/**
 * Checks that a decoded batch transaction is well formed.
 * @param type Transaction type as reported by the decoder.
 * @param decoded Decoded transaction, or null if decoding failed.
 * @returns true if the transaction passes validation, false otherwise.
 */
export function validateBatchTransaction(
  type: string | null,
  decoded: DecodedSequencerBatchTransaction | null
): boolean {
  // Unknown types are considered invalid
  if (type === null) {
    return false
  }

  if (type === 'EIP155' || type === 'ETH_SIGN') {
    // BUG FIX: the parameter is declared nullable but was dereferenced
    // unconditionally, throwing a TypeError for a known type with a failed
    // decoding. Treat that case as invalid instead.
    if (decoded === null) {
      return false
    }

    // The signature recovery parameter must be normalized to 0 or 1.
    if (decoded.sig.v !== 1 && decoded.sig.v !== 0) {
      return false
    }

    return true
  }

  // Allow soft forks
  return false
}
/* Imports: External */
import { getContractFactory } from '@eth-optimism/contracts'
import { BigNumber } from 'ethers'
/* Imports: Internal */
import {
EventArgsStateBatchAppended,
StateRootBatchEntry,
StateRootEntry,
EventHandlerSet,
} from '../../../types'
/**
 * Handler set for StateBatchAppended events: decodes the submitted state
 * roots from the L1 transaction calldata and persists them along with the
 * batch header.
 */
export const handleEventsStateBatchAppended: EventHandlerSet<
  EventArgsStateBatchAppended,
  {
    timestamp: number
    blockNumber: number
    submitter: string
    l1TransactionHash: string
    l1TransactionData: string
  },
  {
    stateRootBatchEntry: StateRootBatchEntry
    stateRootEntries: StateRootEntry[]
  }
> = {
  /**
   * Grabs the L1 block context and the submitting transaction (whose
   * calldata contains the state roots themselves).
   */
  getExtraData: async (event) => {
    const eventBlock = await event.getBlock()
    const l1Transaction = await event.getTransaction()

    return {
      timestamp: eventBlock.timestamp,
      blockNumber: eventBlock.number,
      submitter: l1Transaction.from,
      l1TransactionHash: l1Transaction.hash,
      l1TransactionData: l1Transaction.data,
    }
  },

  /**
   * Decodes the appendStateBatch calldata into individual state root
   * entries plus a batch header entry.
   */
  parseEvent: (event, extraData) => {
    // First decoded argument of appendStateBatch is the array of roots.
    const stateRoots = getContractFactory(
      'OVM_StateCommitmentChain'
    ).interface.decodeFunctionData(
      'appendStateBatch',
      extraData.l1TransactionData
    )[0]

    const stateRootEntries: StateRootEntry[] = []
    for (let i = 0; i < stateRoots.length; i++) {
      stateRootEntries.push({
        // Global index = elements before this batch + position in batch.
        index: event.args._prevTotalElements.add(BigNumber.from(i)).toNumber(),
        batchIndex: event.args._batchIndex.toNumber(),
        value: stateRoots[i],
        confirmed: true,
      })
    }

    // Using .toNumber() here and in other places because I want to move everything to use
    // BigNumber + hex, but that'll take a lot of work. This makes it easier in the future.
    const stateRootBatchEntry: StateRootBatchEntry = {
      index: event.args._batchIndex.toNumber(),
      blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
      timestamp: BigNumber.from(extraData.timestamp).toNumber(),
      submitter: extraData.submitter,
      size: event.args._batchSize.toNumber(),
      root: event.args._batchRoot,
      prevTotalElements: event.args._prevTotalElements.toNumber(),
      extraData: event.args._extraData,
      l1TransactionHash: extraData.l1TransactionHash,
    }

    return {
      stateRootBatchEntry,
      stateRootEntries,
    }
  },

  /**
   * Persists the batch header and its state root entries.
   */
  storeEvent: async (entry, db) => {
    await db.putStateRootBatchEntries([entry.stateRootBatchEntry])
    await db.putStateRootEntries(entry.stateRootEntries)
  },
}
/* Imports: Internal */
import { BigNumber } from 'ethers'
import {
EnqueueEntry,
EventArgsTransactionEnqueued,
EventHandlerSet,
} from '../../../types'
/**
 * Handler set for TransactionEnqueued events. The event itself carries
 * everything we need, so there is no extra-data step.
 */
export const handleEventsTransactionEnqueued: EventHandlerSet<
  EventArgsTransactionEnqueued,
  null,
  EnqueueEntry
> = {
  // Nothing additional to fetch from L1 for this event type.
  getExtraData: async () => null,

  // Maps the raw event arguments onto an EnqueueEntry record.
  parseEvent: (event) => {
    const args = event.args
    const entry: EnqueueEntry = {
      index: args._queueIndex.toNumber(),
      target: args._target,
      data: args._data,
      gasLimit: args._gasLimit.toNumber(),
      origin: args._l1TxOrigin,
      blockNumber: BigNumber.from(event.blockNumber).toNumber(),
      timestamp: args._timestamp.toNumber(),
      // CTC index is unknown at enqueue time; filled in later when the
      // transaction is batched.
      ctcIndex: null,
    }
    return entry
  },

  // Persists the parsed enqueue entry.
  storeEvent: async (entry, db) => {
    await db.putEnqueueEntries([entry])
  },
}
/* Imports: External */
import { ctcCoder } from '@eth-optimism/core-utils'
import { BigNumber, constants, ethers } from 'ethers'
/* Imports: Internal */
import { TransportDB } from '../../../db/transport-db'
import {
DecodedSequencerBatchTransaction,
StateRootEntry,
TransactionEntry,
} from '../../../types'
import {
padHexString,
SEQUENCER_ENTRYPOINT_ADDRESS,
SEQUENCER_GAS_LIMIT,
} from '../../../utils'
/**
 * Handlers for unconfirmed blocks pulled directly from the sequencer
 * (L2 sync), mirroring the shape of the L1 event handler sets.
 */
export const handleSequencerBlock = {
  /**
   * Converts a sequencer block into an unconfirmed TransactionEntry and
   * StateRootEntry pair. Only block.transactions[0] is read — assumes one
   * transaction per sequencer block (TODO confirm).
   * @param block Block as returned by the sequencer's RPC (untyped shape).
   * @param chainId L2 chain ID, used to recover the raw signature `v`.
   */
  parseBlock: async (
    block: any,
    chainId: number
  ): Promise<{
    transactionEntry: TransactionEntry
    stateRootEntry: StateRootEntry
  }> => {
    const transaction = block.transactions[0]

    // Legacy sequencers don't report transaction.index; fall back to
    // blockNumber - 1 in that case.
    const transactionIndex =
      transaction.index === null || transaction.index === undefined
        ? BigNumber.from(transaction.blockNumber).toNumber() - 1
        : BigNumber.from(transaction.index).toNumber()

    let transactionEntry: Partial<TransactionEntry> = {
      // Legacy support.
      index: transactionIndex,
      // Unconfirmed entries have no L1 batch yet.
      batchIndex: null,
      blockNumber: BigNumber.from(transaction.l1BlockNumber).toNumber(),
      timestamp: BigNumber.from(transaction.l1Timestamp).toNumber(),
      queueOrigin: transaction.queueOrigin,
      type: parseTxType(transaction.txType),
      confirmed: false,
    }

    if (transaction.queueOrigin === 'sequencer') {
      // Sequencer-originated: reconstruct the signed transaction fields.
      const decodedTransaction: DecodedSequencerBatchTransaction = {
        sig: {
          // Undo the EIP-155 encoding (v = recoveryParam + 2 * chainId + 35)
          // to recover the 0/1 recovery parameter.
          v: BigNumber.from(transaction.v).toNumber() - 2 * chainId - 35,
          r: padHexString(transaction.r, 32),
          s: padHexString(transaction.s, 32),
        },
        gasLimit: BigNumber.from(transaction.gas).toNumber(),
        gasPrice: BigNumber.from(transaction.gasPrice).toNumber(), // ?
        nonce: BigNumber.from(transaction.nonce).toNumber(),
        target: transaction.to || constants.AddressZero, // ?
        data: transaction.input,
        type: transaction.txType,
      }

      transactionEntry = {
        ...transactionEntry,
        gasLimit: SEQUENCER_GAS_LIMIT, // ?
        target: SEQUENCER_ENTRYPOINT_ADDRESS,
        origin: null,
        data: maybeEncodeSequencerBatchTransaction(
          decodedTransaction,
          transaction.txType
        ),
        decoded: decodedTransaction,
        queueIndex: null,
      }
    } else {
      // Queue (L1-originated) transaction.
      transactionEntry = {
        ...transactionEntry,
        gasLimit: BigNumber.from(transaction.gas).toNumber(),
        target: ethers.utils.getAddress(transaction.to),
        origin: ethers.utils.getAddress(transaction.l1TxOrigin),
        data: transaction.input,
        decoded: null,
        // NOTE(review): missing queueIndex falls back to the nonce field —
        // presumably legacy sequencers reported it there; confirm.
        queueIndex:
          transaction.queueIndex === null ||
          transaction.queueIndex === undefined
            ? BigNumber.from(transaction.nonce).toNumber()
            : BigNumber.from(transaction.queueIndex).toNumber(),
      }
    }

    const stateRootEntry: StateRootEntry = {
      index: transactionIndex,
      batchIndex: null,
      value: block.stateRoot,
      confirmed: false,
    }

    return {
      transactionEntry: transactionEntry as TransactionEntry, // Not the cleanest thing in the world. Could be improved.
      stateRootEntry,
    }
  },

  /**
   * Persists the parsed entries into the unconfirmed indices.
   */
  storeBlock: async (
    entry: {
      transactionEntry: TransactionEntry
      stateRootEntry: StateRootEntry
    },
    db: TransportDB
  ): Promise<void> => {
    // Having separate indices for confirmed/unconfirmed means we never have to worry about
    // accidentally overwriting a confirmed transaction with an unconfirmed one. Unconfirmed
    // transactions are purely extra information.
    await db.putUnconfirmedTransactionEntries([entry.transactionEntry])
    await db.putUnconfirmedStateRootEntries([entry.stateRootEntry])
  },
}
/**
 * Attempts to encode a sequencer batch transaction.
 * @param transaction Transaction to encode.
 * @param type Transaction type.
 * @returns Hex-encoded transaction data, lowercased.
 * @throws If the transaction type is not a known encoding.
 */
const maybeEncodeSequencerBatchTransaction = (
  transaction: DecodedSequencerBatchTransaction,
  type: 'EIP155' | 'EthSign' | null
): string => {
  if (type === 'EIP155') {
    return ctcCoder.eip155TxData.encode(transaction).toLowerCase()
  } else if (type === 'EthSign') {
    return ctcCoder.ethSignTxData.encode(transaction).toLowerCase()
  } else {
    // BUG FIX (resolves the old "Throw?" TODO): previously fell through and
    // returned undefined despite the declared string return type, silently
    // producing corrupt `data` fields downstream. Fail loudly instead.
    throw new Error(`Unable to encode unsupported transaction type: ${type}`)
  }
}
/**
 * Handles differences between the sequencer's enum strings and our own.
 * Will probably want to move this into core-utils eventually.
 * @param type Sequencer transaction type to parse.
 * @returns The internal spelling of the type ('EthSign' becomes
 * 'ETH_SIGN'); all other values pass through unchanged.
 */
const parseTxType = (
  type: 'EIP155' | 'EthSign' | null
): 'EIP155' | 'ETH_SIGN' | null => {
  return type === 'EthSign' ? 'ETH_SIGN' : type
}
/* Imports: External */
import { BaseService } from '@eth-optimism/service-base'
import { JsonRpcProvider } from '@ethersproject/providers'
import colors from 'colors/safe'
import { BigNumber } from 'ethers'
import { LevelUp } from 'levelup'
/* Imports: Internal */
import { TransportDB } from '../../db/transport-db'
import { sleep, toRpcHexString, validators } from '../../utils'
import { L1DataTransportServiceOptions } from '../main/service'
import { handleSequencerBlock } from './handlers/transaction'
export interface L2IngestionServiceOptions
extends L1DataTransportServiceOptions {
db: LevelUp
}
/**
 * Ingests unconfirmed transactions directly from an L2 sequencer node and
 * writes them into the transport database. `_start` runs a single polling
 * loop until the service is stopped or the configured stop block is reached.
 */
export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
  protected name = 'L2 Ingestion Service'
  // Option validation rules and defaults consumed by BaseService.
  protected optionSettings = {
    db: {
      validate: validators.isLevelUP,
    },
    // Either a URL string or an already-constructed provider is accepted;
    // _init normalizes both into a JsonRpcProvider.
    l2RpcProvider: {
      validate: (val: any) => {
        return validators.isUrl(val) || validators.isJsonRpcProvider(val)
      },
    },
    l2ChainId: {
      validate: validators.isInteger,
    },
    pollingInterval: {
      default: 5000,
      validate: validators.isInteger,
    },
    transactionsPerPollingInterval: {
      default: 1000,
      validate: validators.isInteger,
    },
    dangerouslyCatchAllErrors: {
      default: false,
      validate: validators.isBoolean,
    },
    legacySequencerCompatibility: {
      default: false,
      validate: validators.isBoolean,
    },
    // NOTE(review): Number.isInteger(Infinity) is false, so this default only
    // works if BaseService skips validation for defaulted options — confirm
    // against @eth-optimism/service-base.
    stopL2SyncAtBlock: {
      default: Infinity,
      validate: validators.isInteger,
    },
  }
  // Runtime state assembled in _init.
  private state: {
    db: TransportDB
    l2RpcProvider: JsonRpcProvider
  } = {} as any
  protected async _init(): Promise<void> {
    if (this.options.legacySequencerCompatibility) {
      this.logger.info(
        'Using legacy sync, this will be quite a bit slower than normal'
      )
    }
    this.state.db = new TransportDB(this.options.db)
    // Accept either a URL string or a ready-made provider instance.
    this.state.l2RpcProvider =
      typeof this.options.l2RpcProvider === 'string'
        ? new JsonRpcProvider(this.options.l2RpcProvider)
        : this.options.l2RpcProvider
  }
  protected async _start(): Promise<void> {
    while (this.running) {
      try {
        // Falls back to block 1 when nothing has been synced yet. Note that a
        // stored value of 0 is falsy and also coerces to 1 here.
        const highestSyncedL2BlockNumber =
          (await this.state.db.getHighestSyncedUnconfirmedBlock()) || 1
        // Shut down if we're at the stop block.
        if (
          this.options.stopL2SyncAtBlock !== undefined &&
          this.options.stopL2SyncAtBlock !== null &&
          highestSyncedL2BlockNumber >= this.options.stopL2SyncAtBlock
        ) {
          this.logger.info(
            "L2 sync is shutting down because we've reached your target block. Goodbye!"
          )
          return
        }
        let currentL2Block = await this.state.l2RpcProvider.getBlockNumber()
        // Make sure we can't exceed the stop block.
        if (
          this.options.stopL2SyncAtBlock !== undefined &&
          this.options.stopL2SyncAtBlock !== null
        ) {
          currentL2Block = Math.min(
            currentL2Block,
            this.options.stopL2SyncAtBlock
          )
        }
        // Make sure we don't exceed the tip.
        const targetL2Block = Math.min(
          highestSyncedL2BlockNumber +
            this.options.transactionsPerPollingInterval,
          currentL2Block
        )
        // We're already at the head, so no point in attempting to sync.
        if (highestSyncedL2BlockNumber === targetL2Block) {
          await sleep(this.options.pollingInterval)
          continue
        }
        this.logger.info(
          'Synchronizing unconfirmed transactions from Layer 2 (Optimistic Ethereum)',
          {
            fromBlock: highestSyncedL2BlockNumber,
            toBlock: targetL2Block,
          }
        )
        // Synchronize by requesting blocks from the sequencer. Sync from L1 takes precedence.
        await this._syncSequencerBlocks(
          highestSyncedL2BlockNumber,
          targetL2Block
        )
        await this.state.db.setHighestSyncedUnconfirmedBlock(targetL2Block)
        // Only sleep once we've caught up to the tip; if a full batch was
        // synced there is likely more work to do immediately.
        if (
          currentL2Block - highestSyncedL2BlockNumber <
          this.options.transactionsPerPollingInterval
        ) {
          await sleep(this.options.pollingInterval)
        }
      } catch (err) {
        if (!this.running || this.options.dangerouslyCatchAllErrors) {
          this.logger.error('Caught an unhandled error', { err })
          await sleep(this.options.pollingInterval)
        } else {
          // TODO: Is this the best thing to do here?
          throw err
        }
      }
    }
  }
  /**
   * Synchronizes unconfirmed transactions from a range of sequencer blocks.
   * @param startBlockNumber Block to start querying from.
   * @param endBlockNumber Block to query to.
   */
  private async _syncSequencerBlocks(
    startBlockNumber: number,
    endBlockNumber: number
  ): Promise<void> {
    if (startBlockNumber > endBlockNumber) {
      this.logger.warn(
        'Cannot query with start block number larger than end block number',
        {
          startBlockNumber,
          endBlockNumber,
        }
      )
      return
    }
    let blocks: any = []
    if (this.options.legacySequencerCompatibility) {
      // Legacy mode: one eth_getBlockByNumber request per block, in parallel.
      const blockPromises = []
      for (let i = startBlockNumber; i <= endBlockNumber; i++) {
        blockPromises.push(
          this.state.l2RpcProvider.send('eth_getBlockByNumber', [
            toRpcHexString(i),
            true,
          ])
        )
      }
      // Just making sure that the blocks will come back in increasing order.
      blocks = (await Promise.all(blockPromises)).sort((a, b) => {
        return (
          BigNumber.from(a.number).toNumber() -
          BigNumber.from(b.number).toNumber()
        )
      })
    } else {
      // Non-legacy sequencers expose a batched block-range endpoint.
      blocks = await this.state.l2RpcProvider.send('eth_getBlockRange', [
        toRpcHexString(startBlockNumber),
        toRpcHexString(endBlockNumber),
        true,
      ])
    }
    for (const block of blocks) {
      const entry = await handleSequencerBlock.parseBlock(
        block,
        this.options.l2ChainId
      )
      await handleSequencerBlock.storeBlock(entry, this.state.db)
    }
  }
}
/* Imports: External */
import { BaseService } from '@eth-optimism/service-base'
import { LevelUp } from 'levelup'
import level from 'level'
/* Imports: Internal */
import { L1IngestionService } from '../l1-ingestion/service'
import { L1TransportServer } from '../server/service'
import { validators } from '../../utils'
import { L2IngestionService } from '../l2-ingestion/service'
/**
 * Options shared by all data transport layer services. Populated from
 * environment / CLI configuration by the entry point.
 */
export interface L1DataTransportServiceOptions {
  // Address of the Lib_AddressManager contract on L1.
  addressManager: string
  confirmations: number
  // When true, polling-loop errors are logged and swallowed instead of thrown.
  dangerouslyCatchAllErrors?: boolean
  hostname: string
  // URL of the L1 JSON-RPC endpoint.
  l1RpcProvider: string
  l2ChainId: number
  // URL of the L2 (sequencer) JSON-RPC endpoint.
  l2RpcProvider: string
  // Filesystem path for the LevelDB database.
  dbPath: string
  logsPerPollingInterval: number
  pollingInterval: number
  port: number
  showUnconfirmedTransactions: boolean
  // Enable the L1 ingestion service (defaults to true).
  syncFromL1?: boolean
  // Enable the L2 ingestion service (defaults to false).
  syncFromL2?: boolean
  transactionsPerPollingInterval: number
  // When true, fetch L2 blocks one-by-one instead of via eth_getBlockRange.
  legacySequencerCompatibility: boolean
  // Optional block height at which L2 sync should stop.
  stopL2SyncAtBlock?: number
}
/**
 * Top-level service that wires together the transport server and the optional
 * L1 and L2 ingestion services, all sharing a single LevelUp database.
 */
export class L1DataTransportService extends BaseService<L1DataTransportServiceOptions> {
  protected name = 'L1 Data Transport Service'
  protected optionSettings = {
    syncFromL1: {
      default: true,
      validate: validators.isBoolean,
    },
    syncFromL2: {
      default: false,
      validate: validators.isBoolean,
    },
  }
  // Child services created in _init; the ingestion services exist only when
  // the corresponding syncFrom* option is enabled.
  private state: {
    db: LevelUp
    l1IngestionService?: L1IngestionService
    l2IngestionService?: L2IngestionService
    l1TransportServer: L1TransportServer
  } = {} as any
  protected async _init(): Promise<void> {
    this.state.db = level(this.options.dbPath)
    await this.state.db.open()
    this.state.l1TransportServer = new L1TransportServer({
      ...this.options,
      db: this.state.db,
    })
    // Optionally enable sync from L1.
    if (this.options.syncFromL1) {
      this.state.l1IngestionService = new L1IngestionService({
        ...this.options,
        db: this.state.db,
      })
    }
    // Optionally enable sync from L2.
    if (this.options.syncFromL2) {
      this.state.l2IngestionService = new L2IngestionService({
        ...(this.options as any), // TODO: Correct thing to do here is to assert this type.
        db: this.state.db,
      })
    }
    await this.state.l1TransportServer.init()
    if (this.options.syncFromL1) {
      await this.state.l1IngestionService.init()
    }
    if (this.options.syncFromL2) {
      await this.state.l2IngestionService.init()
    }
  }
  protected async _start(): Promise<void> {
    // Run the server and any enabled ingestion services concurrently.
    await Promise.all([
      this.state.l1TransportServer.start(),
      this.options.syncFromL1 ? this.state.l1IngestionService.start() : null,
      this.options.syncFromL2 ? this.state.l2IngestionService.start() : null,
    ])
  }
  protected async _stop(): Promise<void> {
    await Promise.all([
      this.state.l1TransportServer.stop(),
      this.options.syncFromL1 ? this.state.l1IngestionService.stop() : null,
      this.options.syncFromL2 ? this.state.l2IngestionService.stop() : null,
    ])
    // Close the shared database only after every service has stopped.
    await this.state.db.close()
  }
}
/* Imports: External */
import * as dotenv from 'dotenv'
import Config from 'bcfg' // TODO: Add some types for bcfg if we get the chance.
/* Imports: Internal */
import { L1DataTransportService } from './main/service'
/**
 * Minimal typing for the `bcfg` config reader. Each accessor returns the
 * value parsed for `name` from env/argv, or `defaultValue` when unset.
 */
interface Bcfg {
  load: (options: { env?: boolean; argv?: boolean }) => void
  str: (name: string, defaultValue?: string) => string
  uint: (name: string, defaultValue?: number) => number
  bool: (name: string, defaultValue?: boolean) => boolean
}
// Entry point: loads .env, reads configuration from environment and argv,
// and boots the top-level data transport service.
;(async () => {
  try {
    dotenv.config()
    const config: Bcfg = new Config('data-transport-layer')
    config.load({
      env: true,
      argv: true,
    })
    const service = new L1DataTransportService({
      dbPath: config.str('dbPath', './db'),
      port: config.uint('serverPort', 7878),
      hostname: config.str('serverHostname', 'localhost'),
      confirmations: config.uint('confirmations', 35),
      l1RpcProvider: config.str('l1RpcEndpoint'),
      addressManager: config.str('addressManager'),
      pollingInterval: config.uint('pollingInterval', 5000),
      logsPerPollingInterval: config.uint('logsPerPollingInterval', 2000),
      dangerouslyCatchAllErrors: config.bool(
        'dangerouslyCatchAllErrors',
        false
      ),
      l2RpcProvider: config.str('l2RpcEndpoint'),
      l2ChainId: config.uint('l2ChainId'),
      syncFromL1: config.bool('syncFromL1', true),
      syncFromL2: config.bool('syncFromL2', false),
      // NOTE(review): this reads the 'syncFromL2' key rather than a dedicated
      // 'showUnconfirmedTransactions' key — looks like unconfirmed
      // transactions are meant to be shown iff L2 sync is enabled, but
      // confirm this isn't a copy-paste slip.
      showUnconfirmedTransactions: config.bool('syncFromL2', false),
      transactionsPerPollingInterval: config.uint(
        'transactionsPerPollingInterval',
        1000
      ),
      legacySequencerCompatibility: config.bool(
        'legacySequencerCompatibility',
        false
      ),
      stopL2SyncAtBlock: config.uint('stopL2SyncAtBlock'),
    })
    await service.start()
  } catch (err) {
    console.error(
      `Well, that's that. We ran into a fatal error. Here's the dump. Goodbye!`
    )
    throw err
  }
})()
This diff is collapsed.
import {
EnqueueEntry,
StateRootBatchEntry,
StateRootEntry,
TransactionBatchEntry,
TransactionEntry,
} from './database-types'
// An enqueue entry plus the CTC index it was included at (null while the
// enqueue has not yet been included in a batch).
export type EnqueueResponse = EnqueueEntry & {
  ctcIndex: number | null
}
// A single transaction together with the batch that contains it.
export interface TransactionResponse {
  batch: TransactionBatchEntry
  transaction: TransactionEntry
}
// A batch together with every transaction it contains.
export interface TransactionBatchResponse {
  batch: TransactionBatchEntry
  transactions: TransactionEntry[]
}
// A single state root together with the batch that contains it.
export interface StateRootResponse {
  batch: StateRootBatchEntry
  stateRoot: StateRootEntry
}
// A batch together with every state root it contains.
export interface StateRootBatchResponse {
  batch: StateRootBatchEntry
  stateRoots: StateRootEntry[]
}
// L1 block context (number, timestamp, hash).
export interface ContextResponse {
  blockNumber: number
  timestamp: number
  blockHash: string
}
export interface GasPriceResponse {
  gasPrice: string
}
// Discriminated on `syncing`: the highest known transaction index is only
// reported while a sync is in progress.
export type SyncingResponse =
  | {
      syncing: true
      highestKnownTransactionIndex: number
      currentTransactionIndex: number
    }
  | {
      syncing: false
      currentTransactionIndex: number
    }
// A sequencer batch transaction after decoding: signature components plus the
// standard transaction fields.
export interface DecodedSequencerBatchTransaction {
  sig: {
    r: string
    s: string
    v: number
  }
  gasLimit: number
  gasPrice: number
  nonce: number
  target: string
  data: string
  type: number
}
// A transaction enqueued on L1 for later inclusion on L2.
export interface EnqueueEntry {
  index: number
  target: string
  data: string
  gasLimit: number
  origin: string
  blockNumber: number
  timestamp: number
}
// A single L2 transaction as stored in the transport database.
export interface TransactionEntry {
  index: number
  batchIndex: number
  data: string
  blockNumber: number
  timestamp: number
  gasLimit: number
  target: string
  origin: string
  // Whether the transaction came from the sequencer or from the L1 queue.
  queueOrigin: 'sequencer' | 'l1'
  // Index into the L1 queue; null for sequencer transactions.
  queueIndex: number | null
  type: 'EIP155' | 'ETH_SIGN' | null
  // Decoded form; null when the data was not (or could not be) decoded.
  decoded: DecodedSequencerBatchTransaction | null
  confirmed: boolean
}
// Fields common to transaction batches and state root batches.
interface BatchEntry {
  index: number
  blockNumber: number
  timestamp: number
  submitter: string
  size: number
  root: string
  prevTotalElements: number
  extraData: string
  l1TransactionHash: string
}
export type TransactionBatchEntry = BatchEntry
export type StateRootBatchEntry = BatchEntry
// A single state root as stored in the transport database.
export interface StateRootEntry {
  index: number
  batchIndex: number
  value: string
  confirmed: boolean
}
import { JsonRpcProvider } from '@ethersproject/providers'
import { TransportDB } from '../db/transport-db'
import { TypedEthersEvent } from './event-types'
// Fetches any additional data the parse step needs that is not carried on the
// event itself.
export type GetExtraDataHandler<TEventArgs, TExtraData> = (
  event?: TypedEthersEvent<TEventArgs>,
  l1RpcProvider?: JsonRpcProvider
) => Promise<TExtraData>
// Converts a raw event plus its extra data into a database-ready structure.
export type ParseEventHandler<TEventArgs, TExtraData, TParsedEvent> = (
  event: TypedEthersEvent<TEventArgs>,
  extraData: TExtraData
) => TParsedEvent
// Persists a parsed event into the transport database.
export type StoreEventHandler<TParsedEvent> = (
  parsedEvent: TParsedEvent,
  db: TransportDB
) => Promise<void>
// The three-step pipeline (fetch extra data -> parse -> store) implemented by
// each L1 event handler.
export interface EventHandlerSet<TEventArgs, TExtraData, TParsedEvent> {
  getExtraData: GetExtraDataHandler<TEventArgs, TExtraData>
  parseEvent: ParseEventHandler<TEventArgs, TExtraData, TParsedEvent>
  storeEvent: StoreEventHandler<TParsedEvent>
}
/* Imports: External */
import { ethers } from 'ethers'
// An ethers event with strongly-typed `args`.
export type TypedEthersEvent<T> = ethers.Event & {
  args: T
}
// Args for the AddressSet event.
export interface EventArgsAddressSet {
  _name: string
  _newAddress: string
}
// Args for the TransactionEnqueued event.
export interface EventArgsTransactionEnqueued {
  _l1TxOrigin: string
  _target: string
  _gasLimit: ethers.BigNumber
  _data: string
  _queueIndex: ethers.BigNumber
  _timestamp: ethers.BigNumber
}
// Args for the TransactionBatchAppended event.
export interface EventArgsTransactionBatchAppended {
  _batchIndex: ethers.BigNumber
  _batchRoot: string
  _batchSize: ethers.BigNumber
  _prevTotalElements: ethers.BigNumber
  _extraData: string
}
// Args for the StateBatchAppended event (same shape as transaction batches).
export interface EventArgsStateBatchAppended {
  _batchIndex: ethers.BigNumber
  _batchRoot: string
  _batchSize: ethers.BigNumber
  _prevTotalElements: ethers.BigNumber
  _extraData: string
}
// Args for the SequencerBatchAppended event.
export interface EventArgsSequencerBatchAppended {
  _startingQueueIndex: ethers.BigNumber
  _numQueueElements: ethers.BigNumber
  _totalElements: ethers.BigNumber
}
export * from './api-types'
export * from './database-types'
export * from './event-handler-types'
export * from './event-types'
import { toHexString } from '@eth-optimism/core-utils'
/**
 * Basic timeout-based async sleep function.
 * @param ms Number of milliseconds to sleep before resolving.
 */
export const sleep = async (ms: number): Promise<void> => {
  await new Promise<void>((done) => setTimeout(done, ms))
}
/**
 * Evaluates a condition callback and throws if it returns false or if the
 * callback itself throws.
 *
 * Fix: the original threw the "condition is false" error from *inside* its
 * own try block, so that error was immediately caught and re-wrapped,
 * producing a doubled "Assertion failed: ...\nError: Assertion failed: ..."
 * message. The false-condition check now happens outside the try, so only
 * errors raised by the condition callback itself get the wrapped form.
 *
 * @param condition Zero-argument callback expected to return true.
 * @param reason Optional human-readable explanation included in the error.
 */
export const assert = (condition: () => boolean, reason?: string) => {
  let result: boolean
  try {
    result = condition()
  } catch (err) {
    // The condition callback blew up; include the underlying error.
    throw new Error(`Assertion failed: ${reason}\n${err}`)
  }
  if (result === false) {
    throw new Error(`Assertion failed: ${reason}`)
  }
}
/**
 * Converts a number into the 0x-prefixed, minimal-length hex form used for
 * JSON-RPC quantities (no leading zeros; zero is '0x0').
 *
 * Simplification: `Number.prototype.toString(16)` already emits minimal-length
 * hex (and '0' for zero), so the original's external `toHexString` helper,
 * manual leading-zero stripping, and special zero branch are unnecessary.
 *
 * @param n Number to convert. Assumed non-negative — TODO confirm callers
 *   never pass negatives (the original helper had the same implicit
 *   assumption).
 * @returns Hex string such as '0x0', '0x1', '0xff'.
 */
export const toRpcHexString = (n: number): string => {
  return '0x' + n.toString(16)
}
/**
 * Left-pads the body of a 0x-prefixed hex string with zeros until it encodes
 * exactly `length` bytes. Strings already at (or beyond) the target length
 * come back unchanged.
 * @param str 0x-prefixed hex string to pad.
 * @param length Target size in bytes (two hex characters per byte).
 */
export const padHexString = (str: string, length: number): string => {
  const targetLength = 2 + length * 2
  if (str.length === targetLength) {
    return str
  }
  const body = str.slice(2).padStart(length * 2, '0')
  return '0x' + body
}
// Fixed gas limit applied to sequencer transactions.
export const SEQUENCER_GAS_LIMIT = 8_000_000 // TODO: Remove and use value from event.
// L2 predeploy address sequencer transactions are routed through.
// NOTE(review): presumably the OVM sequencer entrypoint predeploy — confirm
// against the L2 genesis state.
export const SEQUENCER_ENTRYPOINT_ADDRESS =
  '0x4200000000000000000000000000000000000005'
/* Imports: External */
import { constants, Contract, Signer } from 'ethers'
import { JsonRpcProvider } from '@ethersproject/providers'
import { getContractInterface } from '@eth-optimism/contracts/build/src/contract-defs'
export const loadContract = (
name: string,
address: string,
provider: JsonRpcProvider
): Contract => {
return new Contract(address, getContractInterface(name) as any, provider)
}
/**
 * Resolves a contract's address through the Lib_AddressManager registry and
 * loads it using the ABI of `name`.
 * @param name Contract name whose ABI should be used.
 * @param proxy Registry key to resolve via Lib_AddressManager.
 * @param Lib_AddressManager Address manager contract to query.
 * @param provider Provider for the returned contract instance.
 * @returns Contract attached to the resolved address.
 * @throws If the registry has no record (zero address) for `proxy`.
 */
export const loadProxyFromManager = async (
  name: string,
  proxy: string,
  Lib_AddressManager: Contract,
  provider: JsonRpcProvider
): Promise<Contract> => {
  const resolved = await Lib_AddressManager.getAddress(proxy)
  if (resolved !== constants.AddressZero) {
    return loadContract(name, resolved, provider)
  }
  throw new Error(
    `Lib_AddressManager does not have a record for a contract named: ${proxy}`
  )
}
/**
 * The set of L1 Optimism contracts the data transport layer interacts with.
 */
export interface OptimismContracts {
  Lib_AddressManager: Contract
  OVM_StateCommitmentChain: Contract
  OVM_CanonicalTransactionChain: Contract
  OVM_ExecutionManager: Contract
}
export const loadOptimismContracts = async (
l1RpcProvider: JsonRpcProvider,
addressManagerAddress: string,
signer?: Signer
): Promise<OptimismContracts> => {
const Lib_AddressManager = loadContract(
'Lib_AddressManager',
addressManagerAddress,
l1RpcProvider
)
const inputs = [
{
name: 'OVM_StateCommitmentChain',
interface: 'iOVM_StateCommitmentChain',
},
{
name: 'OVM_CanonicalTransactionChain',
interface: 'iOVM_CanonicalTransactionChain',
},
{
name: 'OVM_ExecutionManager',
interface: 'iOVM_ExecutionManager',
},
]
const contracts = {}
for (const input of inputs) {
contracts[input.name] = await loadProxyFromManager(
input.interface,
input.name,
Lib_AddressManager,
l1RpcProvider
)
if (signer) {
contracts[input.name] = contracts[input.name].connect(signer)
}
}
contracts['Lib_AddressManager'] = Lib_AddressManager
// TODO: sorry
return contracts as OptimismContracts
}
export * from './common'
export * from './constants'
export * from './contracts'
export * from './validation'
import { fromHexString } from '@eth-optimism/core-utils'
import * as url from 'url'
/**
 * Lightweight runtime validators used to sanity-check service configuration
 * options.
 */
export const validators = {
  isBoolean: (val: any): boolean => typeof val === 'boolean',
  isString: (val: any): boolean => typeof val === 'string',
  // Must be 0x-prefixed and decode to exactly one byte per two characters
  // (rejects odd lengths and non-hex characters).
  isHexString: (val: any): boolean => {
    if (!validators.isString(val) || !val.startsWith('0x')) {
      return false
    }
    return fromHexString(val).length === (val.length - 2) / 2
  },
  // A 20-byte hex string: '0x' followed by 40 characters.
  isAddress: (val: any): boolean => {
    return validators.isHexString(val) && val.length === 42
  },
  isInteger: (val: any): boolean => Number.isInteger(val),
  // Accepts ws, http, and https URLs only.
  isUrl: (val: any): boolean => {
    try {
      const protocol = new url.URL(val).protocol
      return ['ws:', 'http:', 'https:'].includes(protocol)
    } catch (err) {
      return false
    }
  },
  // Duck-typed check: ethers JSON-RPC providers expose a `ready` property.
  isJsonRpcProvider: (val: any): boolean => {
    return val.ready !== undefined
  },
  // TODO: Fix?
  isLevelUP: (val: any): boolean => {
    return val && val.db
  },
}
import { HardhatUserConfig } from 'hardhat/config'
/**
 * Hardhat configuration used when running the test suite.
 */
const config: HardhatUserConfig = {
  // All paths relative to ** this file **.
  paths: {
    tests: '../../test',
    cache: '../temp/cache',
    artifacts: '../temp/artifacts',
  },
}
export default config
/* External Imports */
import chai = require('chai')
import Mocha from 'mocha'
import chaiAsPromised from 'chai-as-promised'
// Chai plugins go here.
chai.use(chaiAsPromised)
// Shared helpers re-exported so every spec uses the same chai instance.
const should = chai.should()
const expect = chai.expect
export { should, expect, Mocha }
import { BigNumber, ethers } from 'ethers'
import { expect } from '../../../../setup'
import {
SequencerBatchAppendedExtraData,
validateBatchTransaction,
handleEventsSequencerBatchAppended,
} from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended'
import { l1TransactionData } from '../../../examples/l1-data'
import { blocksOnL2 } from '../../../examples/l2-data'
// Tests for the SequencerBatchAppended event handlers.
//
// Fix: the `txEntries.every(...)` assertion below originally evaluated
// `t.queueOrigin === 'sequencer' || 'l1'`, which is ALWAYS truthy ('l1' is a
// non-empty string literal), so the assertion could never fail. It now
// compares queueOrigin against both allowed values.
describe('Event Handlers: OVM_CanonicalTransactionChain.SequencerBatchAppended', () => {
  describe('validateBatchTransaction', () => {
    it('should mark a transaction as invalid if the type is null', () => {
      const input1: [any, any] = [null, null]
      const output1 = validateBatchTransaction(...input1)
      const expected1 = false
      expect(output1).to.equal(expected1)
    })
    it('should mark a transaction as invalid if the type is not EIP155 or ETH_SIGN', () => {
      const input1: [any, any] = ['SOME_RANDOM_TYPE', null]
      const output1 = validateBatchTransaction(...input1)
      const expected1 = false
      expect(output1).to.equal(expected1)
    })
    describe('when the transaction type is EIP155 or ETH_SIGN', () => {
      it('should mark a transaction as valid if the `v` parameter is 0', () => {
        // CTC index 23159
        const input1: [any, any] = [
          'EIP155',
          {
            sig: {
              v: 0,
            },
          },
        ]
        const output1 = validateBatchTransaction(...input1)
        const expected1 = true
        expect(output1).to.equal(expected1)
      })
      it('should mark a transaction as valid if the `v` parameter is 1', () => {
        // CTC index 23159
        const input1: [any, any] = [
          'EIP155',
          {
            sig: {
              v: 1,
            },
          },
        ]
        const output1 = validateBatchTransaction(...input1)
        const expected1 = true
        expect(output1).to.equal(expected1)
      })
      it('should mark a transaction as invalid if the `v` parameter is greater than 1', () => {
        // CTC index 23159
        const input1: [any, any] = [
          'EIP155',
          {
            sig: {
              v: 2,
            },
          },
        ]
        const output1 = validateBatchTransaction(...input1)
        const expected1 = false
        expect(output1).to.equal(expected1)
      })
    })
    describe('regressions', () => {
      it('should catch the invalid transaction', () => {
        // CTC index 23159
        const input1: [any, any] = [
          'EIP155',
          {
            sig: {
              r:
                '0x0fbef2080fadc4198ee0d6027e2eb70799d3418574cc085c34a14dcefe14d5d3',
              s:
                '0x3bf394a7cb2aca6790e67382f782a406aefce7553212db52b54a4e087c2195ad',
              v: 56,
            },
            gasLimit: 8000000,
            gasPrice: 0,
            nonce: 0,
            target: '0x1111111111111111111111111111111111111111',
            data: '0x1234',
          },
        ]
        const output1 = validateBatchTransaction(...input1)
        const expected1 = false
        expect(output1).to.equal(expected1)
      })
    })
  })
  describe('handleEventsSequencerBatchAppended.parseEvent', () => {
    // This tests the behavior of parsing a real mainnet transaction,
    // so it will break if the encoding scheme changes.
    // Transaction and extra data from
    // https://etherscan.io/tx/0x6effe006836b841205ace4d99d7ae1b74ee96aac499a3f358b97fccd32ee9af2
    const exampleExtraData = {
      timestamp: 1614862375,
      blockNumber: 11969713,
      submitter: '0xfd7d4de366850c08ee2cba32d851385a3071ec8d',
      l1TransactionHash:
        '0x6effe006836b841205ace4d99d7ae1b74ee96aac499a3f358b97fccd32ee9af2',
      gasLimit: 548976,
      prevTotalElements: BigNumber.from(73677),
      batchIndex: BigNumber.from(743),
      batchSize: BigNumber.from(101),
      batchRoot:
        '10B99425FB53AD7D40A939205C0F7B35CBB89AB4D67E7AE64BDAC5F1073943B4',
      batchExtraData: '',
    }
    it('should correctly parse a mainnet transaction', async () => {
      const input1: [any, SequencerBatchAppendedExtraData] = [
        {
          args: {
            _startingQueueIndex: ethers.constants.Zero,
            _numQueueElements: ethers.constants.Zero,
            _totalElements: ethers.constants.Zero,
          },
        },
        {
          l1TransactionData,
          ...exampleExtraData,
        },
      ]
      const output1 = await handleEventsSequencerBatchAppended.parseEvent(
        ...input1
      )
      const batchEntry = output1.transactionBatchEntry
      expect(batchEntry.index).to.eq(exampleExtraData.batchIndex.toNumber())
      expect(batchEntry.root).to.eq(exampleExtraData.batchRoot)
      expect(batchEntry.size).to.eq(exampleExtraData.batchSize.toNumber())
      expect(batchEntry.prevTotalElements).to.eq(
        exampleExtraData.prevTotalElements.toNumber()
      )
      expect(batchEntry.extraData).to.eq(exampleExtraData.batchExtraData)
      expect(batchEntry.blockNumber).to.eq(exampleExtraData.blockNumber)
      expect(batchEntry.timestamp).to.eq(exampleExtraData.timestamp)
      expect(batchEntry.submitter).to.eq(exampleExtraData.submitter)
      expect(batchEntry.l1TransactionHash).to.eq(
        exampleExtraData.l1TransactionHash
      )
      // Expected transaction entry results based on mainnet data
      // Source: https://ethtx.info/mainnet/0x6effe006836b841205ace4d99d7ae1b74ee96aac499a3f358b97fccd32ee9af2
      const txEntries = output1.transactionEntries
      expect(txEntries).to.have.length(101)
      // Fixed: was `t.queueOrigin === 'sequencer' || 'l1'`, a tautology.
      expect(
        txEntries.every(
          (t) => t.queueOrigin === 'sequencer' || t.queueOrigin === 'l1'
        )
      ).to.be.true
      // Sequencer transactions are decoded, but l1 transactions are not
      txEntries.forEach((tx, i) => {
        if (tx.queueOrigin === 'l1') {
          expect(tx.decoded).to.be.null
        } else {
          const l2Tx = blocksOnL2[i].transactions[0]
          expect(tx.decoded.data).to.equal(l2Tx.data)
          expect(tx.decoded.target).to.equal(l2Tx.to.toLowerCase())
          expect(tx.decoded.nonce).to.equal(l2Tx.nonce)
          expect(tx.decoded.gasLimit).to.equal(
            BigNumber.from(l2Tx.gasLimit.hex).toNumber()
          )
          expect(tx.decoded.gasPrice).to.equal(
            BigNumber.from(l2Tx.gasPrice.hex).toNumber()
          )
        }
      })
    })
    it('should error on malformed transaction data', async () => {
      const input1: [any, SequencerBatchAppendedExtraData] = [
        {
          args: {
            _startingQueueIndex: ethers.constants.Zero,
            _numQueueElements: ethers.constants.Zero,
            _totalElements: ethers.constants.Zero,
          },
        },
        {
          l1TransactionData: '0x00000',
          ...exampleExtraData,
        },
      ]
      expect(() => {
        handleEventsSequencerBatchAppended.parseEvent(...input1)
      }).to.throw(
        `Block ${input1[1].blockNumber} transaction data is invalid for decoding: ${input1[1].l1TransactionData} , ` +
          `converted buffer length is < 12.`
      )
    })
  })
})
import { ethers, BigNumber } from 'ethers'
import { expect } from '../../../../setup'
import { handleEventsTransactionEnqueued } from '../../../../../src/services/l1-ingestion/handlers/transaction-enqueued'
// Number of evenly-spaced sample points used when sweeping the safe-integer
// range in the parametric tests below.
const MAX_ITERATIONS = 128
describe('Event Handlers: OVM_CanonicalTransactionChain.TransactionEnqueued', () => {
  describe('getExtraData', () => {
    it('should return null', async () => {
      const output1 = await handleEventsTransactionEnqueued.getExtraData()
      const expected1 = null
      expect(output1).to.equal(expected1)
    })
  })
  describe('parseEvent', () => {
    // TODO: Honestly this is the simplest `parseEvent` function we have and there isn't much logic
    // to test. We could add a lot more tests that guarantee the correctness of the provided input,
    // but it's probably better to get wider test coverage first.
    it('should have a ctcIndex equal to null', () => {
      const input1: [any, any] = [
        {
          blockNumber: 0,
          args: {
            _queueIndex: ethers.constants.Zero,
            _gasLimit: ethers.constants.Zero,
            _timestamp: ethers.constants.Zero,
          },
        },
        null,
      ]
      const output1 = handleEventsTransactionEnqueued.parseEvent(...input1)
      const expected1 = null
      expect(output1).to.have.property('ctcIndex', expected1)
    })
    it('should have a blockNumber equal to the integer value of the blockNumber parameter', () => {
      // Sample the [0, MAX_SAFE_INTEGER) range at MAX_ITERATIONS points.
      for (
        let i = 0;
        i < Number.MAX_SAFE_INTEGER;
        i += Math.floor(Number.MAX_SAFE_INTEGER / MAX_ITERATIONS)
      ) {
        const input1: [any, any] = [
          {
            blockNumber: i,
            args: {
              _queueIndex: ethers.constants.Zero,
              _gasLimit: ethers.constants.Zero,
              _timestamp: ethers.constants.Zero,
            },
          },
          null,
        ]
        const output1 = handleEventsTransactionEnqueued.parseEvent(...input1)
        const expected1 = BigNumber.from(i).toNumber()
        expect(output1).to.have.property('blockNumber', expected1)
      }
    })
    it('should have an index equal to the integer value of the _queueIndex argument', () => {
      for (
        let i = 0;
        i < Number.MAX_SAFE_INTEGER;
        i += Math.floor(Number.MAX_SAFE_INTEGER / MAX_ITERATIONS)
      ) {
        const input1: [any, any] = [
          {
            blockNumber: 0,
            args: {
              _queueIndex: BigNumber.from(i),
              _gasLimit: ethers.constants.Zero,
              _timestamp: ethers.constants.Zero,
            },
          },
          null,
        ]
        const output1 = handleEventsTransactionEnqueued.parseEvent(...input1)
        const expected1 = BigNumber.from(i).toNumber()
        expect(output1).to.have.property('index', expected1)
      }
    })
    it('should have a gasLimit equal to the integer value of the _gasLimit argument', () => {
      for (
        let i = 0;
        i < Number.MAX_SAFE_INTEGER;
        i += Math.floor(Number.MAX_SAFE_INTEGER / MAX_ITERATIONS)
      ) {
        const input1: [any, any] = [
          {
            blockNumber: 0,
            args: {
              _queueIndex: ethers.constants.Zero,
              _gasLimit: BigNumber.from(i),
              _timestamp: ethers.constants.Zero,
            },
          },
          null,
        ]
        const output1 = handleEventsTransactionEnqueued.parseEvent(...input1)
        const expected1 = BigNumber.from(i).toNumber()
        expect(output1).to.have.property('gasLimit', expected1)
      }
    })
    it('should have a timestamp equal to the integer value of the _timestamp argument', () => {
      for (
        let i = 0;
        i < Number.MAX_SAFE_INTEGER;
        i += Math.floor(Number.MAX_SAFE_INTEGER / MAX_ITERATIONS)
      ) {
        const input1: [any, any] = [
          {
            blockNumber: 0,
            args: {
              _queueIndex: ethers.constants.Zero,
              _gasLimit: ethers.constants.Zero,
              _timestamp: BigNumber.from(i),
            },
          },
          null,
        ]
        const output1 = handleEventsTransactionEnqueued.parseEvent(...input1)
        const expected1 = BigNumber.from(i).toNumber()
        expect(output1).to.have.property('timestamp', expected1)
      }
    })
  })
  describe.skip('storeEvent', () => {
    // TODO: I don't know the best way to test this, plus it's just a single line. Going to ignore
    // it for now.
  })
})
import { expect } from '../../../../setup'
import { l2Block } from '../../../examples/l2-data'
import { handleSequencerBlock } from '../../../../../src/services/l2-ingestion/handlers/transaction'
// Smoke test for the L2 block handler using the example mainnet block data
// imported above.
describe('Handlers: handleSequencerBlock', () => {
  describe('parseBlock', () => {
    it('should correctly extract key fields from an L2 mainnet transaction', async () => {
      const input1: [any, number] = [l2Block, 420]
      const output1 = await handleSequencerBlock.parseBlock(...input1)
      expect(output1.stateRootEntry.value).to.equal(l2Block.stateRoot)
      expect(output1.transactionEntry.decoded.data).to.equal(
        l2Block.transactions[0].input
      )
    })
  })
})
{
"extends": "../../tsconfig.build.json",
"compilerOptions": {
"outDir": "./dist"
},
"include": [
"src/**/*"
  ]
}
{
"extends": "../../tsconfig.json"
}
{
"extends": "../../tslint.base.json"
}
...@@ -21,6 +21,7 @@ ...@@ -21,6 +21,7 @@
"no-submodule-imports": false, "no-submodule-imports": false,
"no-unused-expression": false, "no-unused-expression": false,
"object-literal-sort-keys": false, "object-literal-sort-keys": false,
"prefer-conditional-expression": false,
"ordered-imports": false, "ordered-imports": false,
"semicolon": false, "semicolon": false,
"variable-name": false, "variable-name": false,
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment