Commit f4cc4e58 authored by mergify[bot], committed by GitHub

Merge branch 'develop' into willc/api-listen-fix

parents 4d62a590 0d1545d9
@@ -88,7 +88,6 @@ jobs:
- "packages/contracts-governance/node_modules"
- "packages/contracts-periphery/node_modules"
- "packages/core-utils/node_modules"
- "packages/data-transport-layer/node_modules"
- "packages/drippie-mon/node_modules" - "packages/drippie-mon/node_modules"
- "packages/fault-detector/node_modules" - "packages/fault-detector/node_modules"
- "packages/hardhat-deploy-config/node_modules" - "packages/hardhat-deploy-config/node_modules"
...@@ -672,10 +671,6 @@ jobs: ...@@ -672,10 +671,6 @@ jobs:
name: Check core-utils name: Check core-utils
command: npx depcheck command: npx depcheck
working_directory: packages/core-utils working_directory: packages/core-utils
- run:
name: Check data-transport-layer
command: npx depcheck
working_directory: packages/data-transport-layer
- run:
name: Check sdk
command: npx depcheck
@@ -1180,13 +1175,6 @@ workflows:
dependencies: "(contracts|contracts-bedrock|core-utils|hardhat-deploy-config)"
requires:
- yarn-monorepo
- js-lint-test:
name: dtl-tests
coverage_flag: dtl-tests
package_name: data-transport-layer
dependencies: "(common-ts|contracts|core-utils)"
requires:
- yarn-monorepo
- js-lint-test:
name: chain-mon-tests
coverage_flag: chain-mon-tests
...
@@ -6,7 +6,6 @@
/packages/contracts-bedrock @ethereum-optimism/contract-reviewers
/packages/contracts-periphery @ethereum-optimism/contract-reviewers
/packages/core-utils @ethereum-optimism/legacy-reviewers
/packages/data-transport-layer @ethereum-optimism/legacy-reviewers
/packages/chain-mon @smartcontracts
/packages/fault-detector @ethereum-optimism/devxpod
/packages/hardhat-deploy-config @ethereum-optimism/legacy-reviewers
...
---
C-Protocol-Critical: C-Protocol-Critical:
- 'packages/data-transport-layer/**/*.ts'
- 'packages/contracts-bedrock/**/*.sol'
- 'l2geth/**/*.go'
@@ -21,7 +21,6 @@ jobs:
balance-mon: ${{ steps.packages.outputs.balance-mon }}
drippie-mon: ${{ steps.packages.outputs.drippie-mon }}
wd-mon: ${{ steps.packages.outputs.wd-mon }}
data-transport-layer: ${{ steps.packages.outputs.data-transport-layer }}
contracts: ${{ steps.packages.outputs.contracts }}
contracts-bedrock: ${{ steps.packages.outputs.contracts-bedrock }}
replica-healthcheck: ${{ steps.packages.outputs.replica-healthcheck }}
@@ -284,33 +283,6 @@ jobs:
push: true
tags: ethereumoptimism/wd-mon:${{ needs.canary-publish.outputs.canary-docker-tag }}
data-transport-layer:
name: Publish Data Transport Layer Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
if: needs.canary-publish.outputs.data-transport-layer != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.packages
target: data-transport-layer
push: true
tags: ethereumoptimism/data-transport-layer:${{ needs.canary-publish.outputs.canary-docker-tag }}
contracts:
name: Publish Deployer Version ${{ needs.canary-publish.outputs.canary-docker-tag }}
needs: canary-publish
...
@@ -20,7 +20,6 @@ jobs:
balance-mon: ${{ steps.packages.outputs.drippie-mon }}
drippie-mon: ${{ steps.packages.outputs.drippie-mon }}
wd-mon: ${{ steps.packages.outputs.wd-mon }}
data-transport-layer: ${{ steps.packages.outputs.data-transport-layer }}
contracts: ${{ steps.packages.outputs.contracts }}
contracts-bedrock: ${{ steps.packages.outputs.contracts-bedrock }}
balance-monitor: ${{ steps.packages.outputs.balance-monitor }}
@@ -333,33 +332,6 @@ jobs:
push: true
tags: ethereumoptimism/drippie-mon:${{ needs.release.outputs.drippie-mon }},ethereumoptimism/drippie-mon:latest
data-transport-layer:
name: Publish Data Transport Layer Version ${{ needs.release.outputs.data-transport-layer }}
needs: release
if: needs.release.outputs.data-transport-layer != ''
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN_SECRET }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./ops/docker/Dockerfile.packages
target: data-transport-layer
push: true
tags: ethereumoptimism/data-transport-layer:${{ needs.release.outputs.data-transport-layer }},ethereumoptimism/data-transport-layer:latest
contracts:
name: Publish Deployer Version ${{ needs.release.outputs.contracts }}
needs: release
...
@@ -23,8 +23,6 @@ packages/contracts-periphery/@openzeppelin*
packages/contracts-periphery/hardhat*
packages/contracts-periphery/forge-artifacts*
packages/data-transport-layer/db
packages/contracts-bedrock/deployments/devnetL1
packages/contracts-bedrock/deployments/anvil
...
@@ -20,10 +20,6 @@
"directory": "packages/contracts-periphery",
"changeProcessCWD": true
},
{
"directory": "packages/data-transport-layer",
"changeProcessCWD": true
},
{
"directory": "packages/chain-mon",
"changeProcessCWD": true
...
@@ -124,7 +124,6 @@ This will build the following containers:
* [`l1_chain`](https://hub.docker.com/r/ethereumoptimism/hardhat): simulated L1 chain using hardhat-evm as a backend
* [`deployer`](https://hub.docker.com/r/ethereumoptimism/deployer): process that deploys L1 smart contracts to the L1 chain
* [`dtl`](https://hub.docker.com/r/ethereumoptimism/data-transport-layer): service that indexes transaction data from the L1 chain
* [`l2geth`](https://hub.docker.com/r/ethereumoptimism/l2geth): L2 geth node running in Sequencer mode
* [`verifier`](https://hub.docker.com/r/ethereumoptimism/go-ethereum): L2 geth node running in Verifier mode
* [`relayer`](https://hub.docker.com/r/ethereumoptimism/message-relayer): helper process that relays messages between L1 and L2
...
@@ -54,7 +54,6 @@ Refer to the Directory Structure section below to understand which packages are
│ ├── <a href="./packages/contracts-bedrock">contracts-bedrock</a>: Bedrock smart contracts.
│ ├── <a href="./packages/contracts-periphery">contracts-periphery</a>: Peripheral contracts for Optimism
│ ├── <a href="./packages/core-utils">core-utils</a>: Low-level utilities that make building Optimism easier
│ ├── <a href="./packages/data-transport-layer">data-transport-layer</a>: Service for indexing Optimism-related L1 data
│ ├── <a href="./packages/chain-mon">chain-mon</a>: Chain monitoring services │ ├── <a href="./packages/chain-mon">chain-mon</a>: Chain monitoring services
│ ├── <a href="./packages/fault-detector">fault-detector</a>: Service for detecting Sequencer faults │ ├── <a href="./packages/fault-detector">fault-detector</a>: Service for detecting Sequencer faults
│ ├── <a href="./packages/message-relayer">message-relayer</a>: Tool for automatically relaying L1<>L2 messages in development │ ├── <a href="./packages/message-relayer">message-relayer</a>: Tool for automatically relaying L1<>L2 messages in development
...@@ -83,7 +82,6 @@ Refer to the Directory Structure section below to understand which packages are ...@@ -83,7 +82,6 @@ Refer to the Directory Structure section below to understand which packages are
│ ├── <a href="./packages/common-ts">common-ts</a>: Common tools for building apps in TypeScript │ ├── <a href="./packages/common-ts">common-ts</a>: Common tools for building apps in TypeScript
│ ├── <a href="./packages/contracts-periphery">contracts-periphery</a>: Peripheral contracts for Optimism │ ├── <a href="./packages/contracts-periphery">contracts-periphery</a>: Peripheral contracts for Optimism
│ ├── <a href="./packages/core-utils">core-utils</a>: Low-level utilities that make building Optimism easier │ ├── <a href="./packages/core-utils">core-utils</a>: Low-level utilities that make building Optimism easier
│ ├── <a href="./packages/data-transport-layer">data-transport-layer</a>: Service for indexing Optimism-related L1 data
│ ├── <a href="./packages/chain-mon">chain-mon</a>: Chain monitoring services │ ├── <a href="./packages/chain-mon">chain-mon</a>: Chain monitoring services
│ ├── <a href="./packages/fault-detector">fault-detector</a>: Service for detecting Sequencer faults │ ├── <a href="./packages/fault-detector">fault-detector</a>: Service for detecting Sequencer faults
│ ├── <a href="./packages/message-relayer">message-relayer</a>: Tool for automatically relaying L1<>L2 messages in development │ ├── <a href="./packages/message-relayer">message-relayer</a>: Tool for automatically relaying L1<>L2 messages in development
......
...@@ -88,11 +88,6 @@ FROM base as deployer-bedrock ...@@ -88,11 +88,6 @@ FROM base as deployer-bedrock
WORKDIR /opt/optimism/packages/contracts-bedrock WORKDIR /opt/optimism/packages/contracts-bedrock
CMD ["yarn", "run", "deploy"] CMD ["yarn", "run", "deploy"]
FROM base as data-transport-layer
WORKDIR /opt/optimism/packages/data-transport-layer
COPY ./ops/scripts/dtl.sh .
CMD ["node", "dist/src/services/run.js"]
FROM base as message-relayer
WORKDIR /opt/optimism/packages/message-relayer
...
ignores: [
"@babel/eslint-parser",
"@types/level",
"@typescript-eslint/parser",
"eslint-plugin-import",
"eslint-plugin-unicorn",
"eslint-plugin-jsdoc",
"eslint-plugin-prefer-arrow",
"eslint-plugin-react",
"@typescript-eslint/eslint-plugin",
"eslint-config-prettier",
"eslint-plugin-prettier",
"chai"
]
# General options
DATA_TRANSPORT_LAYER__NODE_ENV=development
# Leave blank during local development
DATA_TRANSPORT_LAYER__ETH_NETWORK_NAME=
DATA_TRANSPORT_LAYER__DB_PATH=./db
DATA_TRANSPORT_LAYER__ADDRESS_MANAGER=
DATA_TRANSPORT_LAYER__POLLING_INTERVAL=5000
DATA_TRANSPORT_LAYER__DANGEROUSLY_CATCH_ALL_ERRORS=true
DATA_TRANSPORT_LAYER__CONFIRMATIONS=12
# Server options
DATA_TRANSPORT_LAYER__SERVER_HOSTNAME=localhost
DATA_TRANSPORT_LAYER__SERVER_PORT=7878
# Set to "true" if you want to sync confirmed transactions from L1 (Ethereum).
# You probably want to set this to "true".
DATA_TRANSPORT_LAYER__SYNC_FROM_L1=true
DATA_TRANSPORT_LAYER__L1_RPC_ENDPOINT=
DATA_TRANSPORT_LAYER__LOGS_PER_POLLING_INTERVAL=2000
# Set to "true" if you want to sync unconfirmed transactions from a sequencer.
# Make sure to fill in the below values if you intend to do so.
DATA_TRANSPORT_LAYER__SYNC_FROM_L2=false
DATA_TRANSPORT_LAYER__L2_RPC_ENDPOINT=
DATA_TRANSPORT_LAYER__TRANSACTIONS_PER_POLLING_INTERVAL=1000
DATA_TRANSPORT_LAYER__L2_CHAIN_ID=69
DATA_TRANSPORT_LAYER__LEGACY_SEQUENCER_COMPATIBILITY=false
# Monitoring
# Leave the SENTRY_DSN variable unset during local development
DATA_TRANSPORT_LAYER__USE_SENTRY=
DATA_TRANSPORT_LAYER__SENTRY_DSN=
DATA_TRANSPORT_LAYER__SENTRY_TRACE_RATE=
DATA_TRANSPORT_LAYER__ENABLE_METRICS=
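The variable names above follow the bcfg convention used by src/services/run.ts: each hyphenated option name is uppercased, hyphens become underscores, and the result is prefixed with DATA_TRANSPORT_LAYER__, so config.uint('server-port', 7878) resolves from DATA_TRANSPORT_LAYER__SERVER_PORT. A minimal sketch of that lookup, mirroring run.ts rather than adding anything new:

import Config from 'bcfg'

// Loads DATA_TRANSPORT_LAYER__* environment variables and --flag arguments.
const config = new Config('data-transport-layer')
config.load({ env: true, argv: true })

const port = config.uint('server-port', 7878)        // DATA_TRANSPORT_LAYER__SERVER_PORT
const syncFromL1 = config.bool('sync-from-l1', true)  // DATA_TRANSPORT_LAYER__SYNC_FROM_L1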
module.exports = {
extends: '../../.eslintrc.js',
}
module.exports = {
...require('../../.prettierrc.js'),
};
\ No newline at end of file
(The MIT License)
Copyright 2020-2021 Optimism
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{
"private": true,
"name": "@eth-optimism/data-transport-layer",
"version": "0.5.56",
"description": "[Optimism] Service for shuttling data from L1 into L2",
"main": "dist/index",
"types": "dist/index",
"files": [
"dist/index"
],
"scripts": {
"clean": "rimraf ./dist ./tsconfig.tsbuildinfo",
"clean:db": "rimraf ./db",
"lint": "yarn run lint:fix && yarn run lint:check",
"lint:fix": "yarn lint:check --fix",
"lint:check": "eslint . --max-warnings=0",
"start": "ts-node ./src/services/run.ts",
"start:local": "ts-node ./src/services/run.ts | pino-pretty",
"test": "hardhat --config test/config/hardhat.config.ts test",
"test:coverage": "nyc hardhat --config test/config/hardhat.config.ts test && nyc merge .nyc_output coverage.json",
"build": "tsc -p tsconfig.json",
"pre-commit": "lint-staged"
},
"keywords": [
"optimism",
"ethereum",
"data",
"transport",
"layer"
],
"homepage": "https://github.com/ethereum-optimism/optimism/tree/develop/packages/data-transport-layer#readme",
"license": "MIT",
"author": "Optimism PBC",
"repository": {
"type": "git",
"url": "https://github.com/ethereum-optimism/optimism.git"
},
"dependencies": {
"@eth-optimism/common-ts": "0.8.1",
"@eth-optimism/contracts": "0.6.0",
"@eth-optimism/core-utils": "0.12.0",
"@ethersproject/providers": "^5.7.0",
"@ethersproject/transactions": "^5.7.0",
"@sentry/node": "^6.3.1",
"@sentry/tracing": "^6.3.1",
"@types/express": "^4.17.12",
"axios": "^0.21.1",
"bcfg": "^0.1.6",
"bfj": "^7.0.2",
"cors": "^2.8.5",
"dotenv": "^10.0.0",
"ethers": "^5.7.0",
"express": "^4.17.1",
"express-prom-bundle": "^6.3.6",
"level6": "npm:level@^6.0.1",
"levelup": "^4.4.0"
},
"devDependencies": {
"@types/cors": "^2.8.9",
"@types/levelup": "^4.3.0",
"@types/level": "^6.0.1",
"bfj": "^7.0.2",
"chai-as-promised": "^7.1.1",
"hardhat": "^2.9.6",
"mocha": "^8.4.0",
"pino-pretty": "^4.7.1",
"prettier": "^2.8.0",
"prom-client": "^13.1.0",
"rimraf": "^3.0.2",
"ts-node": "^10.9.1"
}
}
export * from './chain-constants'
export * from './patch-contexts'
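A short orientation for the table that follows, inferred from its shape rather than documented in the file: the outer key is an L2 chain ID (10 is Optimism mainnet), and each inner entry appears to pair an L2 block height with a corrected Unix timestamp (the values decode to late January 2022).

// Inferred reading (not stated in the source):
//   PATCH_CONTEXTS[chainId][blockHeight] = patched Unix timestamp
//   e.g. PATCH_CONTEXTS[10][2817218] === 1643139411 (2022-01-25 UTC)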
export const PATCH_CONTEXTS = {
10: {
2817218: 1643139411,
2817287: 1643139718,
2817898: 1643140952,
2818512: 1643141859,
2818984: 1643142762,
2819864: 1643144275,
2820902: 1643146079,
2821157: 1643146389,
2821170: 1643146389,
2821339: 1643146689,
2821772: 1643147604,
2821814: 1643147909,
2821952: 1643147909,
2822262: 1643148824,
2822342: 1643149130,
2822425: 1643149130,
2822602: 1643149430,
2822742: 1643149733,
2822987: 1643150660,
2822999: 1643150660,
2823039: 1643150964,
2823046: 1643150964,
2823055: 1643150964,
2823096: 1643151269,
2823205: 1643151572,
2823260: 1643151572,
2823306: 1643151572,
2823322: 1643151572,
2823413: 1643151872,
2823419: 1643151872,
2823460: 1643151872,
2823561: 1643152174,
2823592: 1643152174,
2824036: 1643152774,
2824050: 1643153075,
2824107: 1643153075,
2824247: 1643153376,
2832642: 1643173416,
2835330: 1643181396,
2838173: 1643188371,
2838174: 1643188371,
2838175: 1643188371,
2840388: 1643192601,
2844171: 1643202366,
2845370: 1643204181,
2845931: 1643205096,
2846484: 1643205696,
2894118: 1643281866,
2894119: 1643281866,
2959506: 1643399826,
2967959: 1643419611,
2971530: 1643432181,
2974571: 1643443881,
2981176: 1643465226,
2984205: 1643470986,
2995760: 1643498166,
2996847: 1643501211,
2997086: 1643501811,
2997087: 1643501811,
2997569: 1643503026,
2998970: 1643506101,
3000041: 1643510376,
3000042: 1643510376,
3000973: 1643514306,
3001008: 1643514606,
3001009: 1643514606,
3002529: 1643520081,
3008446: 1643541501,
3009141: 1643543016,
3012287: 1643551521,
3012348: 1643551821,
3022052: 1643574336,
3042815: 1643624616,
3043000: 1643625516,
3060328: 1643656446,
3060471: 1643656746,
3064982: 1643667996,
3070655: 1643683461,
},
}
/* Imports: External */
import { LevelUp } from 'levelup'
import { BigNumber } from 'ethers'
export class SimpleDB {
constructor(public db: LevelUp) {}
public async get<TEntry>(key: string, index: number): Promise<TEntry | null> {
try {
// TODO: Better checks here.
return JSON.parse(await this.db.get(this._makeKey(key, index)))
} catch (err) {
return null
}
}
public async range<TEntry>(
key: string,
startIndex: number,
endIndex: number
): Promise<TEntry[] | []> {
try {
return new Promise<any[]>((resolve) => {
const entries: any[] = []
this.db
.createValueStream({
gte: this._makeKey(key, startIndex),
lt: this._makeKey(key, endIndex),
})
.on('data', (transaction: string) => {
entries.push(JSON.parse(transaction))
})
.on('error', () => {
resolve(null)
})
.on('close', () => {
// TODO: Close vs end? Need to double check later.
resolve(entries)
})
.on('end', () => {
resolve(entries)
})
})
} catch (err) {
return []
}
}
public async put<TEntry>(
entries: {
key: string
index: number
value: TEntry
}[]
): Promise<void> {
return this.db.batch(
entries.map((entry) => {
return {
type: 'put',
key: this._makeKey(entry.key, entry.index),
value: JSON.stringify(entry.value),
}
})
)
}
private _makeKey(key: string, index: number): string {
// prettier-ignore
return `${key}:${BigNumber.from(index).toString().padStart(32, '0')}`
}
}
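The zero-padding in _makeKey is what makes range work: LevelDB iterates keys lexicographically, and padding every index to 32 digits makes lexicographic order coincide with numeric order. A standalone sketch of the encoding (the key names and indices here are invented for illustration):

import { BigNumber } from 'ethers'

// Same encoding as SimpleDB._makeKey, shown on its own.
const makeKey = (key: string, index: number): string =>
  `${key}:${BigNumber.from(index).toString().padStart(32, '0')}`

console.log(makeKey('transaction', 7))
// transaction:00000000000000000000000000000007
console.log(makeKey('transaction', 120))
// transaction:00000000000000000000000000000120
// Without the padding, 'transaction:120' would sort before 'transaction:7',
// and the gte/lt bounds passed to createValueStream would miss entries.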
export type EventName =
| 'TransactionEnqueued'
| 'SequencerBatchAppended'
| 'StateBatchAppended'
| 'SequencerBatchAppendedTransaction'
export class MissingElementError extends Error {
constructor(public name: EventName) {
super(`missing event: ${name}`)
}
}
/* Imports: External */
import { BigNumber, ethers, constants } from 'ethers'
import { serialize, Transaction } from '@ethersproject/transactions'
import { getContractFactory } from '@eth-optimism/contracts'
import {
toHexString,
toRpcHexString,
BatchType,
SequencerBatch,
} from '@eth-optimism/core-utils'
import { SequencerBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/contracts/L1/rollup/CanonicalTransactionChain'
/* Imports: Internal */
import { MissingElementError } from './errors'
import {
DecodedSequencerBatchTransaction,
SequencerBatchAppendedExtraData,
SequencerBatchAppendedParsedEvent,
TransactionBatchEntry,
TransactionEntry,
EventHandlerSet,
} from '../../../types'
import { parseSignatureVParam } from '../../../utils'
export const handleEventsSequencerBatchAppended: EventHandlerSet<
SequencerBatchAppendedEvent,
SequencerBatchAppendedExtraData,
SequencerBatchAppendedParsedEvent
> = {
getExtraData: async (event, l1RpcProvider) => {
const l1Transaction = await event.getTransaction()
const eventBlock = await event.getBlock()
// TODO: We need to update our events so that we actually have enough information to parse this
// batch without having to pull out this extra event. For the meantime, we need to find this
// "TransactonBatchAppended" event to get the rest of the data.
const CanonicalTransactionChain = getContractFactory(
'CanonicalTransactionChain'
)
.attach(event.address)
.connect(l1RpcProvider)
const batchSubmissionEvent = (
await CanonicalTransactionChain.queryFilter(
CanonicalTransactionChain.filters.TransactionBatchAppended(),
eventBlock.number,
eventBlock.number
)
).find((foundEvent: ethers.Event) => {
// We might have more than one event in this block, so we specifically want to find a
// "TransactonBatchAppended" event emitted immediately before the event in question.
return (
foundEvent.transactionHash === event.transactionHash &&
foundEvent.logIndex === event.logIndex - 1
)
})
if (!batchSubmissionEvent) {
throw new Error(
`Well, this really shouldn't happen. A SequencerBatchAppended event doesn't have a corresponding TransactionBatchAppended event.`
)
}
return {
timestamp: eventBlock.timestamp,
blockNumber: eventBlock.number,
submitter: l1Transaction.from,
l1TransactionHash: l1Transaction.hash,
l1TransactionData: l1Transaction.data,
prevTotalElements: batchSubmissionEvent.args._prevTotalElements,
batchIndex: batchSubmissionEvent.args._batchIndex,
batchSize: batchSubmissionEvent.args._batchSize,
batchRoot: batchSubmissionEvent.args._batchRoot,
batchExtraData: batchSubmissionEvent.args._extraData,
}
},
parseEvent: (event, extraData, l2ChainId) => {
const transactionEntries: TransactionEntry[] = []
// Minimum plausible size: '0x' prefix (2 chars) + 12 bytes of data (12 * 2 = 24 hex chars) = 26 chars.
if (extraData.l1TransactionData.length < 26) {
throw new Error(
`Block ${extraData.blockNumber} transaction data is too small: ${extraData.l1TransactionData.length}`
)
}
// TODO: typings not working?
const decoded = (SequencerBatch as any).fromHex(extraData.l1TransactionData)
// Keep track of the CTC index
let transactionIndex = 0
// Keep track of the number of deposits
let enqueuedCount = 0
// Keep track of the tx index in the current batch
let index = 0
for (const context of decoded.contexts) {
for (let j = 0; j < context.numSequencedTransactions; j++) {
const buf = decoded.transactions[index]
if (!buf) {
throw new Error(
`Invalid batch context, tx count: ${decoded.transactions.length}, attempting to parse ${index}`
)
}
const tx = buf.toTransaction()
transactionEntries.push({
index: extraData.prevTotalElements
.add(BigNumber.from(transactionIndex))
.toNumber(),
batchIndex: extraData.batchIndex.toNumber(),
blockNumber: BigNumber.from(context.blockNumber).toNumber(),
timestamp: BigNumber.from(context.timestamp).toNumber(),
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: null,
data: serialize(
{
nonce: tx.nonce,
gasPrice: tx.gasPrice,
gasLimit: tx.gasLimit,
to: tx.to,
value: tx.value,
data: tx.data,
},
{
v: tx.v,
r: tx.r,
s: tx.s,
}
),
queueOrigin: 'sequencer',
value: toRpcHexString(tx.value),
queueIndex: null,
decoded: mapSequencerTransaction(tx, l2ChainId),
confirmed: true,
})
transactionIndex++
index++
}
for (let j = 0; j < context.numSubsequentQueueTransactions; j++) {
const queueIndex = event.args._startingQueueIndex.add(
BigNumber.from(enqueuedCount)
)
// Okay, so. Since events are processed in parallel, we don't know if the Enqueue
// event associated with this queue element has already been processed. So we'll ask
// the api to fetch that data for itself later on and we use fake values for some
// fields. The real TODO here is to make sure we fix this data structure to avoid ugly
// "dummy" fields.
transactionEntries.push({
index: extraData.prevTotalElements
.add(BigNumber.from(transactionIndex))
.toNumber(),
batchIndex: extraData.batchIndex.toNumber(),
blockNumber: BigNumber.from(0).toNumber(),
timestamp: context.timestamp,
gasLimit: BigNumber.from(0).toString(),
target: constants.AddressZero,
origin: constants.AddressZero,
data: '0x',
queueOrigin: 'l1',
value: '0x0',
queueIndex: queueIndex.toNumber(),
decoded: null,
confirmed: true,
})
enqueuedCount++
transactionIndex++
}
}
const transactionBatchEntry: TransactionBatchEntry = {
index: extraData.batchIndex.toNumber(),
root: extraData.batchRoot,
size: extraData.batchSize.toNumber(),
prevTotalElements: extraData.prevTotalElements.toNumber(),
extraData: extraData.batchExtraData,
blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
l1TransactionHash: extraData.l1TransactionHash,
type: BatchType[decoded.type],
}
return {
transactionBatchEntry,
transactionEntries,
}
},
storeEvent: async (entry, db) => {
// Defend against situations where we missed an event because the RPC provider
// (infura/alchemy/whatever) is missing an event.
if (entry.transactionBatchEntry.index > 0) {
const prevTransactionBatchEntry = await db.getTransactionBatchByIndex(
entry.transactionBatchEntry.index - 1
)
// We should *always* have a previous transaction batch here.
if (prevTransactionBatchEntry === null) {
throw new MissingElementError('SequencerBatchAppended')
}
}
// Same consistency checks but for transaction entries.
if (
entry.transactionEntries.length > 0 &&
entry.transactionEntries[0].index > 0
) {
const prevTransactionEntry = await db.getTransactionByIndex(
entry.transactionEntries[0].index - 1
)
// We should *always* have a previous transaction here.
if (prevTransactionEntry === null) {
throw new MissingElementError('SequencerBatchAppendedTransaction')
}
}
await db.putTransactionEntries(entry.transactionEntries)
// Add an additional field to the enqueued transactions in the database
// if they have already been confirmed
for (const transactionEntry of entry.transactionEntries) {
if (transactionEntry.queueOrigin === 'l1') {
await db.putTransactionIndexByQueueIndex(
transactionEntry.queueIndex,
transactionEntry.index
)
}
}
await db.putTransactionBatchEntries([entry.transactionBatchEntry])
},
}
const mapSequencerTransaction = (
tx: Transaction,
l2ChainId: number
): DecodedSequencerBatchTransaction => {
return {
nonce: BigNumber.from(tx.nonce).toString(),
gasPrice: BigNumber.from(tx.gasPrice).toString(),
gasLimit: BigNumber.from(tx.gasLimit).toString(),
value: toRpcHexString(tx.value),
target: tx.to ? toHexString(tx.to) : null,
data: toHexString(tx.data),
sig: {
v: parseSignatureVParam(tx.v, l2ChainId),
r: toHexString(tx.r),
s: toHexString(tx.s),
},
}
}
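The indexing in parseEvent above is easiest to check with small numbers. A self-contained sketch with invented values (prevTotalElements, the starting queue index, and the counts are hypothetical, not taken from any real batch):

import { BigNumber } from 'ethers'

// One context with 2 sequencer transactions followed by 1 queue transaction.
const prevTotalElements = BigNumber.from(100)
const startingQueueIndex = BigNumber.from(40)
let transactionIndex = 0 // advances across BOTH kinds of transactions

for (let j = 0; j < 2; j++) {
  // CTC index = prevTotalElements + transactionIndex -> 100, then 101
  console.log('sequencer tx index:', prevTotalElements.add(transactionIndex).toNumber())
  transactionIndex++
}
for (let j = 0; j < 1; j++) {
  // Queue transactions continue the same numbering -> 102,
  // while queueIndex counts enqueues separately -> 40
  console.log(
    'queue tx index:', prevTotalElements.add(transactionIndex).toNumber(),
    'queueIndex:', startingQueueIndex.add(j).toNumber()
  )
  transactionIndex++
}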
/* Imports: External */
import { StateBatchAppendedEvent } from '@eth-optimism/contracts/dist/types/contracts/L1/rollup/StateCommitmentChain'
import { getContractFactory } from '@eth-optimism/contracts'
import { BigNumber } from 'ethers'
/* Imports: Internal */
import { MissingElementError } from './errors'
import {
StateRootBatchEntry,
StateBatchAppendedExtraData,
StateBatchAppendedParsedEvent,
StateRootEntry,
EventHandlerSet,
} from '../../../types'
export const handleEventsStateBatchAppended: EventHandlerSet<
StateBatchAppendedEvent,
StateBatchAppendedExtraData,
StateBatchAppendedParsedEvent
> = {
getExtraData: async (event) => {
const eventBlock = await event.getBlock()
const l1Transaction = await event.getTransaction()
return {
timestamp: eventBlock.timestamp,
blockNumber: eventBlock.number,
submitter: l1Transaction.from,
l1TransactionHash: l1Transaction.hash,
l1TransactionData: l1Transaction.data,
}
},
parseEvent: (event, extraData) => {
const stateRoots = getContractFactory(
'StateCommitmentChain'
).interface.decodeFunctionData(
'appendStateBatch',
extraData.l1TransactionData
)[0]
const stateRootEntries: StateRootEntry[] = []
for (let i = 0; i < stateRoots.length; i++) {
stateRootEntries.push({
index: event.args._prevTotalElements.add(BigNumber.from(i)).toNumber(),
batchIndex: event.args._batchIndex.toNumber(),
value: stateRoots[i],
confirmed: true,
})
}
// Using .toNumber() here and in other places because I want to move everything to use
// BigNumber + hex, but that'll take a lot of work. This makes it easier in the future.
const stateRootBatchEntry: StateRootBatchEntry = {
index: event.args._batchIndex.toNumber(),
blockNumber: BigNumber.from(extraData.blockNumber).toNumber(),
timestamp: BigNumber.from(extraData.timestamp).toNumber(),
submitter: extraData.submitter,
size: event.args._batchSize.toNumber(),
root: event.args._batchRoot,
prevTotalElements: event.args._prevTotalElements.toNumber(),
extraData: event.args._extraData,
l1TransactionHash: extraData.l1TransactionHash,
type: 'LEGACY', // There is currently only 1 state root batch type
}
return {
stateRootBatchEntry,
stateRootEntries,
}
},
storeEvent: async (entry, db) => {
// Defend against situations where we missed an event because the RPC provider
// (infura/alchemy/whatever) is missing an event.
if (entry.stateRootBatchEntry.index > 0) {
const prevStateRootBatchEntry = await db.getStateRootBatchByIndex(
entry.stateRootBatchEntry.index - 1
)
// We should *always* have a previous batch entry here.
if (prevStateRootBatchEntry === null) {
throw new MissingElementError('StateBatchAppended')
}
}
await db.putStateRootBatchEntries([entry.stateRootBatchEntry])
await db.putStateRootEntries(entry.stateRootEntries)
},
}
/* Imports: External */
import { BigNumber } from 'ethers'
import { TransactionEnqueuedEvent } from '@eth-optimism/contracts/dist/types/contracts/L1/rollup/CanonicalTransactionChain'
/* Imports: Internal */
import { MissingElementError } from './errors'
import { EnqueueEntry, EventHandlerSet } from '../../../types'
export const handleEventsTransactionEnqueued: EventHandlerSet<
TransactionEnqueuedEvent,
null,
EnqueueEntry
> = {
getExtraData: async () => {
return null
},
parseEvent: (event) => {
return {
index: event.args._queueIndex.toNumber(),
target: event.args._target,
data: event.args._data,
gasLimit: event.args._gasLimit.toString(),
origin: event.args._l1TxOrigin,
blockNumber: BigNumber.from(event.blockNumber).toNumber(),
timestamp: event.args._timestamp.toNumber(),
ctcIndex: null,
}
},
storeEvent: async (entry, db) => {
// Defend against situations where we missed an event because the RPC provider
// (infura/alchemy/whatever) is missing an event.
if (entry.index > 0) {
const prevEnqueueEntry = await db.getEnqueueByIndex(entry.index - 1)
// We should *always* have a previous enqueue entry here.
if (prevEnqueueEntry === null) {
throw new MissingElementError('TransactionEnqueued')
}
}
await db.putEnqueueEntries([entry])
},
}
export type EventName = 'SequencerTransaction'
export class MissingElementError extends Error {
constructor(public name: EventName) {
super(`missing event: ${name}`)
}
}
/* Imports: External */
import { BigNumber, ethers } from 'ethers'
import { serialize } from '@ethersproject/transactions'
import { padHexString } from '@eth-optimism/core-utils'
/* Imports: Internal */
import { TransportDB } from '../../../db/transport-db'
import {
DecodedSequencerBatchTransaction,
StateRootEntry,
TransactionEntry,
} from '../../../types'
import { parseSignatureVParam } from '../../../utils'
import { MissingElementError } from './errors'
export const handleSequencerBlock = {
parseBlock: async (
block: any,
chainId: number
): Promise<{
transactionEntry: TransactionEntry
stateRootEntry: StateRootEntry
}> => {
const transaction = block.transactions[0]
const transactionIndex =
BigNumber.from(transaction.blockNumber).toNumber() - 1
// We make the assumption that you don't need to sync the genesis block
if (transactionIndex < 0) {
throw new Error('should not happen, attempted to sync genesis block')
}
let transactionEntry: Partial<TransactionEntry> = {
// Legacy support.
index: transactionIndex,
value: transaction.value,
batchIndex: null,
blockNumber: BigNumber.from(transaction.l1BlockNumber).toNumber(),
timestamp: BigNumber.from(transaction.l1Timestamp).toNumber(),
queueOrigin: transaction.queueOrigin,
confirmed: false,
}
if (transaction.queueOrigin === 'sequencer') {
const decodedTransaction: DecodedSequencerBatchTransaction = {
sig: {
v: parseSignatureVParam(transaction.v, chainId),
r: padHexString(transaction.r, 32),
s: padHexString(transaction.s, 32),
},
value: transaction.value,
gasLimit: BigNumber.from(transaction.gas).toString(),
gasPrice: BigNumber.from(transaction.gasPrice).toString(),
nonce: BigNumber.from(transaction.nonce).toString(),
target: transaction.to,
data: transaction.input,
}
transactionEntry = {
...transactionEntry,
gasLimit: BigNumber.from(0).toString(),
target: ethers.constants.AddressZero,
origin: null,
data: serialize(
{
value: transaction.value,
gasLimit: transaction.gas,
gasPrice: transaction.gasPrice,
nonce: transaction.nonce,
to: transaction.to,
data: transaction.input,
chainId,
},
{
v: BigNumber.from(transaction.v).toNumber(),
r: padHexString(transaction.r, 32),
s: padHexString(transaction.s, 32),
}
),
decoded: decodedTransaction,
queueIndex: null,
}
} else {
transactionEntry = {
...transactionEntry,
gasLimit: BigNumber.from(transaction.gas).toString(),
target: ethers.utils.getAddress(transaction.to),
origin: ethers.utils.getAddress(transaction.l1TxOrigin),
data: transaction.input,
decoded: null,
queueIndex:
transaction.queueIndex === null ||
transaction.queueIndex === undefined
? BigNumber.from(transaction.nonce).toNumber()
: BigNumber.from(transaction.queueIndex).toNumber(),
}
}
const stateRootEntry: StateRootEntry = {
index: transactionIndex,
batchIndex: null,
value: block.stateRoot,
confirmed: false,
}
return {
transactionEntry: transactionEntry as TransactionEntry, // Not the cleanest thing in the world. Could be improved.
stateRootEntry,
}
},
storeBlock: async (
entry: {
transactionEntry: TransactionEntry
stateRootEntry: StateRootEntry
},
db: TransportDB
): Promise<void> => {
if (entry.transactionEntry.index > 0) {
const prevTransactionEntry = await db.getUnconfirmedTransactionByIndex(
entry.transactionEntry.index - 1
)
// We should *always* have a previous transaction here.
if (prevTransactionEntry === null) {
throw new MissingElementError('SequencerTransaction')
}
}
// Having separate indices for confirmed/unconfirmed means we never have to worry about
// accidentally overwriting a confirmed transaction with an unconfirmed one. Unconfirmed
// transactions are purely extra information.
await db.putUnconfirmedTransactionEntries([entry.transactionEntry])
await db.putUnconfirmedStateRootEntries([entry.stateRootEntry])
},
}
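The transactionIndex computed in parseBlock leans on the legacy rule that every L2 block contains exactly one transaction (hence block.transactions[0]) and that genesis holds none, so block N's transaction sits at global index N - 1. A tiny worked example with a made-up block number:

import { BigNumber } from 'ethers'

const blockNumber = BigNumber.from(4327) // hypothetical L2 block
const transactionIndex = blockNumber.toNumber() - 1
console.log(transactionIndex) // 4326; parseBlock throws if this would be < 0 (genesis)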
/* Imports: External */
import { BaseService, LegacyMetrics } from '@eth-optimism/common-ts'
import { LevelUp } from 'levelup'
import level from 'level6'
import { Counter } from 'prom-client'
/* Imports: Internal */
import { L1IngestionService } from '../l1-ingestion/service'
import { L1TransportServer } from '../server/service'
import { validators } from '../../utils'
import { L2IngestionService } from '../l2-ingestion/service'
import { BSS_HF1_INDEX } from '../../config'
export interface L1DataTransportServiceOptions {
nodeEnv: string
ethNetworkName?: 'mainnet' | 'kovan' | 'goerli'
release: string
addressManager: string
confirmations: number
dangerouslyCatchAllErrors?: boolean
hostname: string
l1RpcProvider: string
l1RpcProviderUser?: string
l1RpcProviderPassword?: string
l2ChainId: number
l2RpcProvider: string
l2RpcProviderUser?: string
l2RpcProviderPassword?: string
l1SyncShutoffBlock?: number
metrics?: LegacyMetrics
dbPath: string
logsPerPollingInterval: number
pollingInterval: number
port: number
syncFromL1?: boolean
syncFromL2?: boolean
transactionsPerPollingInterval: number
legacySequencerCompatibility: boolean
useSentry?: boolean
sentryDsn?: string
sentryTraceRate?: number
defaultBackend: string
l1GasPriceBackend: string
l1StartHeight?: number
}
const optionSettings = {
syncFromL1: {
default: true,
validate: validators.isBoolean,
},
syncFromL2: {
default: false,
validate: validators.isBoolean,
},
}
// prettier-ignore
export class L1DataTransportService extends BaseService<L1DataTransportServiceOptions> {
constructor(options: L1DataTransportServiceOptions) {
super('L1_Data_Transport_Service', options, optionSettings)
}
private state: {
db: LevelUp
l1IngestionService?: L1IngestionService
l2IngestionService?: L2IngestionService
l1TransportServer: L1TransportServer
metrics: LegacyMetrics
failureCounter: Counter<string>
} = {} as any
protected async _init(): Promise<void> {
this.logger.info('Initializing L1 Data Transport Service...')
this.state.db = level(this.options.dbPath)
await this.state.db.open()
// BSS HF1 activates at block 0 if not specified.
const bssHf1Index = BSS_HF1_INDEX[this.options.l2ChainId] || 0
this.logger.info(`L2 chain ID is: ${this.options.l2ChainId}`)
this.logger.info(`BSS HF1 will activate at: ${bssHf1Index}`)
this.state.metrics = new LegacyMetrics({
labels: {
environment: this.options.nodeEnv,
network: this.options.ethNetworkName,
release: this.options.release,
service: this.name,
}
})
this.state.metrics.client.collectDefaultMetrics({
prefix: 'data_transport_layer_'
})
this.state.failureCounter = new this.state.metrics.client.Counter({
name: 'data_transport_layer_main_service_failures',
help: 'Counts the number of times that the main service fails',
registers: [this.state.metrics.registry],
})
this.state.l1TransportServer = new L1TransportServer({
...this.options,
metrics: this.state.metrics,
db: this.state.db,
})
// Optionally enable sync from L1.
if (this.options.syncFromL1) {
this.state.l1IngestionService = new L1IngestionService({
...this.options,
metrics: this.state.metrics,
db: this.state.db,
})
}
// Optionally enable sync from L2.
if (this.options.syncFromL2) {
this.state.l2IngestionService = new L2IngestionService({
...(this.options as any), // TODO: Correct thing to do here is to assert this type.
metrics: this.state.metrics,
db: this.state.db,
})
}
await this.state.l1TransportServer.init()
if (this.options.syncFromL1) {
await this.state.l1IngestionService.init()
}
if (this.options.syncFromL2) {
await this.state.l2IngestionService.init()
}
}
protected async _start(): Promise<void> {
try {
await Promise.all([
this.state.l1TransportServer.start(),
this.options.syncFromL1 ? this.state.l1IngestionService.start() : null,
this.options.syncFromL2 ? this.state.l2IngestionService.start() : null,
])
} catch (e) {
this.state.failureCounter.inc()
throw e
}
}
protected async _stop(): Promise<void> {
try {
await Promise.all([
this.state.l1TransportServer.stop(),
this.options.syncFromL1 ? this.state.l1IngestionService.stop() : null,
this.options.syncFromL2 ? this.state.l2IngestionService.stop() : null,
])
await this.state.db.close()
} catch (e) {
this.state.failureCounter.inc()
throw e
}
}
}
/* Imports: External */
import * as dotenv from 'dotenv'
import { Bcfg } from '@eth-optimism/core-utils'
import Config from 'bcfg'
/* Imports: Internal */
import { L1DataTransportService } from './main/service'
type ethNetwork = 'mainnet' | 'kovan' | 'goerli'
;(async () => {
try {
dotenv.config()
const config: Bcfg = new Config('data-transport-layer')
config.load({
env: true,
argv: true,
})
const service = new L1DataTransportService({
nodeEnv: config.str('node-env', 'development'),
ethNetworkName: config.str('eth-network-name') as ethNetwork,
release: `data-transport-layer@${process.env.npm_package_version}`,
dbPath: config.str('db-path', './db'),
port: config.uint('server-port', 7878),
hostname: config.str('server-hostname', 'localhost'),
confirmations: config.uint('confirmations', 35),
l1RpcProvider: config.str('l1-rpc-endpoint'),
l1RpcProviderUser: config.str('l1-rpc-user'),
l1RpcProviderPassword: config.str('l1-rpc-password'),
addressManager: config.str('address-manager'),
l1SyncShutoffBlock: config.uint('l1-sync-shutoff-block'),
pollingInterval: config.uint('polling-interval', 5000),
logsPerPollingInterval: config.uint('logs-per-polling-interval', 2000),
dangerouslyCatchAllErrors: config.bool(
'dangerously-catch-all-errors',
false
),
l2RpcProvider: config.str('l2-rpc-endpoint'),
l2RpcProviderUser: config.str('l2-rpc-user'),
l2RpcProviderPassword: config.str('l2-rpc-password'),
l2ChainId: config.uint('l2-chain-id'),
syncFromL1: config.bool('sync-from-l1', true),
syncFromL2: config.bool('sync-from-l2', false),
transactionsPerPollingInterval: config.uint(
'transactions-per-polling-interval',
1000
),
legacySequencerCompatibility: config.bool(
'legacy-sequencer-compatibility',
false
),
defaultBackend: config.str('default-backend', 'l1'),
l1GasPriceBackend: config.str('l1-gas-price-backend', 'l1'),
l1StartHeight: config.uint('l1-start-height'),
useSentry: config.bool('use-sentry', false),
sentryDsn: config.str('sentry-dsn'),
sentryTraceRate: config.ufloat('sentry-trace-rate', 0.05),
})
const stop = async (signal) => {
console.log(`"{"msg": "${signal} - Stopping data-transport layer"}"`)
await service.stop()
process.exit()
}
process.on('SIGTERM', stop)
process.on('SIGINT', stop)
await service.start()
} catch (err) {
console.error(
`Well, that's that. We ran into a fatal error. Here's the dump. Goodbye!`
)
throw err
}
})()
import {
EnqueueEntry,
StateRootBatchEntry,
StateRootEntry,
TransactionBatchEntry,
TransactionEntry,
} from './database-types'
export type EnqueueResponse = EnqueueEntry & {
ctcIndex: number | null
}
export interface TransactionResponse {
batch: TransactionBatchEntry
transaction: TransactionEntry
}
export interface TransactionBatchResponse {
batch: TransactionBatchEntry
transactions: TransactionEntry[]
}
export interface StateRootResponse {
batch: StateRootBatchEntry
stateRoot: StateRootEntry
}
export interface StateRootBatchResponse {
batch: StateRootBatchEntry
stateRoots: StateRootEntry[]
}
export interface ContextResponse {
blockNumber: number
timestamp: number
blockHash: string
}
export interface GasPriceResponse {
gasPrice: string
}
export type SyncingResponse =
| {
syncing: true
highestKnownTransactionIndex: number
currentTransactionIndex: number
}
| {
syncing: false
currentTransactionIndex: number
}
export interface DecodedSequencerBatchTransaction {
sig: {
r: string
s: string
v: number
}
value: string
gasLimit: string
gasPrice: string
nonce: string
target: string
data: string
}
export interface EnqueueEntry {
index: number
target: string
data: string
gasLimit: string
origin: string
blockNumber: number
timestamp: number
}
export interface TransactionEntry {
index: number
batchIndex: number
data: string
blockNumber: number
timestamp: number
gasLimit: string
target: string
origin: string
value: string
queueOrigin: 'sequencer' | 'l1'
queueIndex: number | null
decoded: DecodedSequencerBatchTransaction | null
confirmed: boolean
}
interface BatchEntry {
index: number
blockNumber: number
timestamp: number
submitter: string
size: number
root: string
prevTotalElements: number
extraData: string
l1TransactionHash: string
type: string
}
export type TransactionBatchEntry = BatchEntry
export type StateRootBatchEntry = BatchEntry
export interface StateRootEntry {
index: number
batchIndex: number
value: string
confirmed: boolean
}
import { BaseProvider } from '@ethersproject/providers'
import { BigNumber } from 'ethers'
import { TypedEvent } from '@eth-optimism/contracts/dist/types/common'
import {
TransactionBatchEntry,
TransactionEntry,
StateRootBatchEntry,
StateRootEntry,
} from './database-types'
import { TransportDB } from '../db/transport-db'
export type GetExtraDataHandler<TEvent extends TypedEvent, TExtraData> = (
event?: TEvent,
l1RpcProvider?: BaseProvider
) => Promise<TExtraData>
export type ParseEventHandler<
TEvent extends TypedEvent,
TExtraData,
TParsedEvent
> = (event: TEvent, extraData: TExtraData, l2ChainId: number) => TParsedEvent
export type StoreEventHandler<TParsedEvent> = (
parsedEvent: TParsedEvent,
db: TransportDB
) => Promise<void>
export interface EventHandlerSet<
TEvent extends TypedEvent,
TExtraData,
TParsedEvent
> {
getExtraData: GetExtraDataHandler<TEvent, TExtraData>
parseEvent: ParseEventHandler<TEvent, TExtraData, TParsedEvent>
storeEvent: StoreEventHandler<TParsedEvent>
}
export interface SequencerBatchAppendedExtraData {
timestamp: number
blockNumber: number
submitter: string
l1TransactionData: string
l1TransactionHash: string
// Stuff from TransactionBatchAppended.
prevTotalElements: BigNumber
batchIndex: BigNumber
batchSize: BigNumber
batchRoot: string
batchExtraData: string
}
export interface SequencerBatchAppendedParsedEvent {
transactionBatchEntry: TransactionBatchEntry
transactionEntries: TransactionEntry[]
}
export interface StateBatchAppendedExtraData {
timestamp: number
blockNumber: number
submitter: string
l1TransactionHash: string
l1TransactionData: string
}
export interface StateBatchAppendedParsedEvent {
stateRootBatchEntry: StateRootBatchEntry
stateRootEntries: StateRootEntry[]
}
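The three handler types above imply a three-phase pipeline. The ingestion service that drives them is not part of this diff, so the function below is only a hedged sketch of that control flow, not the actual L1IngestionService code:

// Hypothetical driver illustrating how an EventHandlerSet is consumed.
const processEvent = async <TEvent extends TypedEvent, TExtraData, TParsedEvent>(
  handlers: EventHandlerSet<TEvent, TExtraData, TParsedEvent>,
  event: TEvent,
  l1RpcProvider: BaseProvider,
  l2ChainId: number,
  db: TransportDB
): Promise<void> => {
  const extraData = await handlers.getExtraData(event, l1RpcProvider) // extra L1 lookups
  const parsed = handlers.parseEvent(event, extraData, l2ChainId)     // pure decoding
  await handlers.storeEvent(parsed, db)                               // consistency checks + writes
}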
export * from './api-types'
export * from './database-types'
export * from './event-handler-types'
/* Imports: External */
import { constants, Contract, Signer } from 'ethers'
import { BaseProvider } from '@ethersproject/providers'
import { getContractInterface } from '@eth-optimism/contracts'
export const loadContract = (
name: string,
address: string,
provider: BaseProvider
): Contract => {
return new Contract(address, getContractInterface(name) as any, provider)
}
export const loadProxyFromManager = async (
name: string,
proxy: string,
Lib_AddressManager: Contract,
provider: BaseProvider
): Promise<Contract> => {
const address = await Lib_AddressManager.getAddress(proxy)
if (address === constants.AddressZero) {
throw new Error(
`Lib_AddressManager does not have a record for a contract named: ${proxy}`
)
}
return loadContract(name, address, provider)
}
export interface OptimismContracts {
Lib_AddressManager: Contract
StateCommitmentChain: Contract
CanonicalTransactionChain: Contract
}
export const loadOptimismContracts = async (
l1RpcProvider: BaseProvider,
addressManagerAddress: string,
signer?: Signer
): Promise<OptimismContracts> => {
const Lib_AddressManager = loadContract(
'Lib_AddressManager',
addressManagerAddress,
l1RpcProvider
)
const inputs = [
{
name: 'StateCommitmentChain',
interface: 'IStateCommitmentChain',
},
{
name: 'CanonicalTransactionChain',
interface: 'ICanonicalTransactionChain',
},
]
const contracts = {}
for (const input of inputs) {
contracts[input.name] = await loadProxyFromManager(
input.interface,
input.name,
Lib_AddressManager,
l1RpcProvider
)
if (signer) {
contracts[input.name] = contracts[input.name].connect(signer)
}
}
contracts['Lib_AddressManager'] = Lib_AddressManager
// TODO: sorry
return contracts as OptimismContracts
}
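A hedged usage sketch for loadOptimismContracts; the RPC URL and manager address below are placeholders, not values from this repository:

import { ethers } from 'ethers'

const main = async () => {
  const l1RpcProvider = new ethers.providers.JsonRpcProvider('http://localhost:8545')
  const addressManagerAddress = '0x...' // deployed Lib_AddressManager (placeholder)
  const contracts = await loadOptimismContracts(l1RpcProvider, addressManagerAddress)
  // Each proxy is resolved by name through Lib_AddressManager and wrapped in
  // its interface (IStateCommitmentChain / ICanonicalTransactionChain).
  console.log(await contracts.CanonicalTransactionChain.getTotalElements())
}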
/* Imports: External */
import { ethers } from 'ethers'
export const parseSignatureVParam = (
v: number | ethers.BigNumber | string,
chainId: number
): number => {
v = ethers.BigNumber.from(v).toNumber()
// Handle unprotected transactions
if (v === 27 || v === 28) {
return v
}
// Handle EIP155 transactions
return v - 2 * chainId - 35
}
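Worked values for the formula above, using Optimism mainnet's chain ID (10). Under EIP-155, v = recoveryId + 2 * chainId + 35, which this function inverts:

console.log(parseSignatureVParam(27, 10)) // 27 (unprotected, passed through)
console.log(parseSignatureVParam(55, 10)) // 0  = 55 - 2*10 - 35
console.log(parseSignatureVParam(56, 10)) // 1  = 56 - 2*10 - 35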
export * from './contracts'
export * from './validation'
export * from './eth-tx'
import * as url from 'url'
import { fromHexString } from '@eth-optimism/core-utils'
export const validators = {
isBoolean: (val: any): boolean => {
return typeof val === 'boolean'
},
isString: (val: any): boolean => {
return typeof val === 'string'
},
isHexString: (val: any): boolean => {
return (
validators.isString(val) &&
val.startsWith('0x') &&
fromHexString(val).length === (val.length - 2) / 2
)
},
isAddress: (val: any): boolean => {
return validators.isHexString(val) && val.length === 42
},
isInteger: (val: any): boolean => {
return Number.isInteger(val)
},
isUrl: (val: any): boolean => {
try {
const parsed = new url.URL(val)
return (
parsed.protocol === 'ws:' ||
parsed.protocol === 'http:' ||
parsed.protocol === 'https:'
)
} catch (err) {
return false
}
},
isJsonRpcProvider: (val: any): boolean => {
return val && val.ready !== undefined
},
isLevelUP: (val: any): boolean => {
// TODO: Fix?
return val && val.db
},
}
import { HardhatUserConfig } from 'hardhat/config'
const config: HardhatUserConfig = {
// All paths relative to ** this file **.
paths: {
tests: '../../test',
cache: '../temp/cache',
artifacts: '../temp/artifacts',
},
}
export default config
import fs from 'fs'
import path from 'path'
import chai = require('chai')
import Mocha from 'mocha'
import chaiAsPromised from 'chai-as-promised'
import { BigNumber } from 'ethers'
// Chai plugins go here.
chai.use(chaiAsPromised)
const should = chai.should()
const expect = chai.expect
const readMockData = () => {
const mockDataPath = path.join(__dirname, 'unit-tests', 'examples')
const paths = fs.readdirSync(mockDataPath)
const files = []
for (const filename of paths) {
// Skip non .txt files
if (!filename.endsWith('.txt')) {
continue
}
const filePath = path.join(mockDataPath, filename)
const file = fs.readFileSync(filePath)
const obj = JSON.parse(file.toString())
// Reserialize the BigNumbers
obj.input.extraData.prevTotalElements = BigNumber.from(
obj.input.extraData.prevTotalElements
)
obj.input.extraData.batchIndex = BigNumber.from(
obj.input.extraData.batchIndex
)
if (obj.input.event.args.length !== 3) {
throw new Error(`ABI mismatch`)
}
obj.input.event.args = obj.input.event.args.map(BigNumber.from)
obj.input.event.args._startingQueueIndex = obj.input.event.args[0]
obj.input.event.args._numQueueElements = obj.input.event.args[1]
obj.input.event.args._totalElements = obj.input.event.args[2]
obj.input.extraData.batchSize = BigNumber.from(
obj.input.extraData.batchSize
)
files.push(obj)
}
return files
}
export { should, expect, Mocha, readMockData }
import { BigNumber, ethers } from 'ethers'
import { sequencerBatch, add0x, BatchType } from '@eth-optimism/core-utils'
const compressBatchWithZlib = (calldata: string): string => {
const batch = sequencerBatch.decode(calldata)
batch.type = BatchType.ZLIB
const encoded = sequencerBatch.encode(batch)
return add0x(encoded)
}
/* Imports: Internal */
import { expect, readMockData } from '../../../../setup'
import { handleEventsSequencerBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/sequencer-batch-appended'
import { SequencerBatchAppendedExtraData } from '../../../../../src/types'
describe('Event Handlers: CanonicalTransactionChain.SequencerBatchAppended', () => {
const mockData = readMockData()
describe('handleEventsSequencerBatchAppended.parseEvent', () => {
// This tests the behavior of parsing a real mainnet transaction,
// so it will break if the encoding scheme changes.
// Transaction and extra data from
// https://etherscan.io/tx/0x6effe006836b841205ace4d99d7ae1b74ee96aac499a3f358b97fccd32ee9af2
const exampleExtraData = {
timestamp: 1614862375,
blockNumber: 11969713,
submitter: '0xfd7d4de366850c08ee2cba32d851385a3071ec8d',
l1TransactionHash:
'0x6effe006836b841205ace4d99d7ae1b74ee96aac499a3f358b97fccd32ee9af2',
gasLimit: '548976',
prevTotalElements: BigNumber.from(73677),
batchIndex: BigNumber.from(743),
batchSize: BigNumber.from(101),
batchRoot:
'10B99425FB53AD7D40A939205C0F7B35CBB89AB4D67E7AE64BDAC5F1073943B4',
batchExtraData: '',
}
it('should error on malformed transaction data', async () => {
const input1: [any, SequencerBatchAppendedExtraData, number] = [
{
args: {
_startingQueueIndex: ethers.constants.Zero,
_numQueueElements: ethers.constants.Zero,
_totalElements: ethers.constants.Zero,
},
},
{
l1TransactionData: '0x00000',
...exampleExtraData,
},
0,
]
expect(() => {
handleEventsSequencerBatchAppended.parseEvent(...input1)
}).to.throw(
`Block ${input1[1].blockNumber} transaction data is too small: ${input1[1].l1TransactionData.length}`
)
})
describe('mainnet transactions', () => {
for (const mock of mockData) {
const { input, output } = mock
const { event, extraData, l2ChainId } = input
const hash = mock.input.extraData.l1TransactionHash
it(`uncompressed: ${hash}`, () => {
// Set the type to be legacy
output.transactionBatchEntry.type = BatchType[BatchType.LEGACY]
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
extraData,
l2ChainId
)
// Check all of the transaction entries individually
for (const [i, got] of res.transactionEntries.entries()) {
const expected = output.transactionEntries[i]
expect(got).to.deep.eq(expected, `case ${i}`)
}
expect(res).to.deep.eq(output)
})
it(`compressed: ${hash}`, () => {
// Set the type to be zlib
output.transactionBatchEntry.type = BatchType[BatchType.ZLIB]
const compressed = compressBatchWithZlib(
input.extraData.l1TransactionData
)
const copy = { ...extraData }
copy.l1TransactionData = compressed
const res = handleEventsSequencerBatchAppended.parseEvent(
event,
copy,
l2ChainId
)
expect(res).to.deep.eq(output)
})
}
})
})
})
/* Imports: External */
import { BigNumber } from 'ethers'
/* Imports: Internal */
import { expect } from '../../../../setup'
import { handleEventsStateBatchAppended } from '../../../../../src/services/l1-ingestion/handlers/state-batch-appended'
import { StateBatchAppendedExtraData } from '../../../../../src/types'
import { l1StateBatchData } from '../../../examples/l1-data'
describe('Event Handlers: CanonicalTransactionChain.StateBatchAppended', () => {
describe('getExtraData', () => {
it('should return event block and transaction', async () => {
// Source: https://etherscan.io/tx/0x4ca72484e93cdb50fe1089984db152258c2bbffc2534dcafbfe032b596bd5b49
const l1Transaction = {
hash: '0x4ca72484e93cdb50fe1089984db152258c2bbffc2534dcafbfe032b596bd5b49',
from: '0xfd7d4de366850c08ee2cba32d851385a3071ec8d',
data: l1StateBatchData,
}
// Source: https://etherscan.io/block/12106615
const eventBlock = {
timestamp: 1616680530,
number: 12106615,
hash: '0x9c40310e19e943ad38e170329465c4489f6aba5895e9cacdac236be181aea31f',
parentHash:
'0xc7707a04c287a22ff4e43e5d9316e45ab342dcd405e7e0284eb51ce71a3a29ac',
miner: '0xea674fdde714fd979de3edf0f56aa9716b898ec8',
nonce: '0x40e6174f521a7cd8',
difficulty: 5990647962682594,
gasLimit: BigNumber.from(548976),
gasUsed: BigNumber.from(12495850),
extraData: '0x65746865726d696e652d6575726f70652d7765737433',
transactions: [l1Transaction.hash],
}
const input1: [any] = [
{
getBlock: () => eventBlock,
getTransaction: () => l1Transaction,
},
]
const output1 = await handleEventsStateBatchAppended.getExtraData(
...input1
)
expect(output1.timestamp).to.equal(eventBlock.timestamp)
expect(output1.blockNumber).to.equal(eventBlock.number)
expect(output1.submitter).to.equal(l1Transaction.from)
expect(output1.l1TransactionHash).to.equal(l1Transaction.hash)
expect(output1.l1TransactionData).to.equal(l1Transaction.data)
})
})
describe('parseEvent', () => {
it('should have a ctcIndex equal to null', () => {
// Source: https://etherscan.io/tx/0x4ca72484e93cdb50fe1089984db152258c2bbffc2534dcafbfe032b596bd5b49#eventlog
const event = {
args: {
_batchIndex: BigNumber.from(144),
_batchRoot:
'AD2039C6E9A8EE58817252CF16AB720BF3ED20CC4B53184F5B11DE09639AA123',
_batchSize: BigNumber.from(522),
_prevTotalElements: BigNumber.from(96000),
_extraData:
'00000000000000000000000000000000000000000000000000000000605C33E2000000000000000000000000FD7D4DE366850C08EE2CBA32D851385A3071EC8D',
},
}
const extraData: StateBatchAppendedExtraData = {
l1TransactionData: l1StateBatchData,
timestamp: 1616680530,
blockNumber: 12106615,
submitter: '0xfd7d4de366850c08ee2cba32d851385a3071ec8d',
l1TransactionHash:
'0x4ca72484e93cdb50fe1089984db152258c2bbffc2534dcafbfe032b596bd5b49',
}
const input1: [any, StateBatchAppendedExtraData, number] = [
event,
extraData,
0,
]
const output1 = handleEventsStateBatchAppended.parseEvent(...input1)
expect(output1.stateRootEntries.length).to.eq(
event.args._batchSize.toNumber()
)
output1.stateRootEntries.forEach((entry, i) => {
expect(entry.index).to.eq(
event.args._prevTotalElements.add(BigNumber.from(i)).toNumber()
)
expect(entry.batchIndex).to.eq(event.args._batchIndex.toNumber())
expect(entry.confirmed).to.be.true
})
const batchEntry = output1.stateRootBatchEntry
expect(batchEntry.index).to.eq(event.args._batchIndex.toNumber())
expect(batchEntry.blockNumber).to.eq(extraData.blockNumber)
expect(batchEntry.timestamp).to.eq(extraData.timestamp)
expect(batchEntry.submitter).to.eq(extraData.submitter)
expect(batchEntry.size).to.eq(event.args._batchSize.toNumber())
expect(batchEntry.root).to.eq(event.args._batchRoot)
expect(batchEntry.prevTotalElements).to.eq(
event.args._prevTotalElements.toNumber()
)
expect(batchEntry.extraData).to.eq(event.args._extraData)
expect(batchEntry.l1TransactionHash).to.eq(extraData.l1TransactionHash)
})
})
})
/* Imports: Internal */
import { expect } from '../../../../setup'
import { l2Block } from '../../../examples/l2-data'
import { handleSequencerBlock } from '../../../../../src/services/l2-ingestion/handlers/transaction'
describe('Handlers: handleSequencerBlock', () => {
describe('parseBlock', () => {
it('should correctly extract key fields from an L2 mainnet transaction', async () => {
const input1: [any, number] = [l2Block, 10]
const output1 = await handleSequencerBlock.parseBlock(...input1)
expect(output1.stateRootEntry.value).to.equal(l2Block.stateRoot)
expect(output1.transactionEntry.decoded.data).to.equal(
l2Block.transactions[0].input
)
})
})
})
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist"
},
"include": [
"src/**/*"
]
}