Commit 6ad1dcb9 authored by Liam Horne

Merge branch 'develop' into regenesis/0.4.0

parents fa29b03e 42decb6e
---
'@eth-optimism/core-utils': patch
---
Minor fix on watchers to pick up finalization of transactions on L1
---
'@eth-optimism/data-transport-layer': patch
---
Add highest L1 and L2 block number Gauge metrics to DTL
---
'@eth-optimism/batch-submitter': patch
---
Improved logging around batch submission timeouts
---
'@eth-optimism/core-utils': patch
---
Improved the watcher's ability to find transactions during periods of high load
---
'@eth-optimism/data-transport-layer': minor
---
Define L1 starting block via OwnershipTransferred (occurring on block 1) rather than AddressSet (occurring on block 2 onwards)
{
  // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
  // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
  // List of extensions which should be recommended for users of this workspace.
  "recommendations": [
    "dbaeumer.vscode-eslint",
    "editorconfig.editorconfig",
    "juanblanco.solidity",
    "golang.go"
  ]
}
{
  "editor.formatOnSave": true,
  "[typescript]": {
    "editor.defaultFormatter": "dbaeumer.vscode-eslint",
    "editor.formatOnSave": true
  },
  "eslint.nodePath": "./node_modules/eslint/bin/",
  "eslint.format.enable": true,
  "editorconfig.generateAuto": false,
  "files.trimTrailingWhitespace": true
}
# utils
This package holds Go utilities used by
[Optimistic Ethereum](https://github.com/ethereum-optimism/optimism).
## Packages
### Fees
Package `fees` includes helpers for dealing with fees on Optimistic Ethereum.
#### `EncodeTxGasLimit(data []byte, l1GasPrice, l2GasLimit, l2GasPrice *big.Int) *big.Int`
Encodes `tx.gasLimit` based on the variables used to determine it.

- `data` - Calldata of the transaction being sent. This data should *not* include the full signed RLP transaction.
- `l1GasPrice` - Gas price on L1, in wei.
- `l2GasLimit` - Amount of gas provided for execution on L2. Note that accounts are charged for execution based on this gas limit, even if the gas used ends up being lower.
- `l2GasPrice` - Gas price on L2, in wei.
#### `DecodeL2GasLimit(gasLimit *big.Int) *big.Int`
Accepts the return value of `eth_estimateGas` and decodes the L2 gas limit that
is encoded in the return value. This is the gas limit that is passed to the user
contract within the OVM.
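A minimal round-trip sketch (not part of the package): it assumes the `fees` package is importable as `github.com/ethereum-optimism/optimism/go/utils/fees`, which matches this module's path, and borrows an L2 gas limit from the package's own tests.

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum-optimism/optimism/go/utils/fees"
	"github.com/ethereum/go-ethereum/params"
)

func main() {
	// Calldata only; the RLP-encoded transaction is deliberately left out
	// because its cost is folded into the package's overhead constant.
	data := []byte{0xde, 0xad, 0xbe, 0xef}

	l1GasPrice := new(big.Int).SetUint64(params.GWei)
	l2GasPrice := new(big.Int).SetUint64(params.GWei)
	l2GasLimit := new(big.Int).SetUint64(196205)

	// The encoded value is what a wallet would set as tx.gasLimit.
	encoded := fees.EncodeTxGasLimit(data, l1GasPrice, l2GasLimit, l2GasPrice)

	// Decoding recovers the L2 gas limit rounded up to the nearest 10,000,
	// so this prints 200000 rather than the original 196205.
	fmt.Println(fees.DecodeL2GasLimit(encoded))
}
```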
package fees
import (
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/params"
)
// overhead represents the fixed cost of batch submission of a single
// transaction in gas.
const overhead uint64 = 4200 + 200*params.TxDataNonZeroGasEIP2028
// feeScalar is used to scale the calculations in EncodeTxGasLimit
// to prevent them from becoming too large.
const feeScalar uint64 = 10_000_000
// TxGasPrice is a constant that determines the result of `eth_gasPrice`.
// It is the fee scalar scaled upwards by 50%: tx.gasPrice is hard coded to
// 15,000,000 wei and all transactions must set that gas price.
const TxGasPrice uint64 = feeScalar + (feeScalar / 2)
// BigTxGasPrice is TxGasPrice (the L2 gas price) as a big.Int.
var BigTxGasPrice = new(big.Int).SetUint64(TxGasPrice)
var bigFeeScalar = new(big.Int).SetUint64(feeScalar)
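// tenThousand is both the granularity to which the L2 gas limit is rounded
// and the divisor used to pack it into the low-order digits of tx.gasLimit.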
const tenThousand = 10000
var BigTenThousand = new(big.Int).SetUint64(tenThousand)
// EncodeTxGasLimit computes the `tx.gasLimit` based on the L1/L2 gas prices and
// the L2 gas limit. The L2 gas limit is encoded inside of the lower order bits
// of the number like so:
//   [           | l2GasLimit ]
//   [        tx.gaslimit     ]
// The lower order bits must be large enough to fit the L2 gas limit, so 10**8
// is chosen. If higher order bits collide with any bits from the L2 gas limit,
// the L2 gas limit will not be able to be decoded.
// An explicit design goal of this scheme was to make the L2 gas limit be human
// readable. The entire number is interpreted as the gas limit when computing
// the fee, so increasing the L2 Gas limit will increase the fee paid.
// The calculation is:
// l1GasLimit = zero_count(data) * 4 + non_zero_count(data) * 16 + overhead
// roundedL2GasLimit = ceilmod(l2GasLimit, 10_000)
// l1Fee = l1GasPrice * l1GasLimit
// l2Fee = l2GasPrice * roundedL2GasLimit
// sum = l1Fee + l2Fee
// scaled = sum / scalar
// rounded = ceilmod(scaled, tenThousand)
// roundedScaledL2GasLimit = roundedL2GasLimit / tenThousand
// result = rounded + roundedScaledL2GasLimit
// Note that for simplicity purposes, only the calldata is passed into this
// function when in reality the RLP encoded transaction should be. The
// additional cost is added to the overhead constant to prevent the need to RLP
// encode transactions during calls to `eth_estimateGas`
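// As an illustrative worked example using the constants defined in this file,
// take ten zero bytes of calldata, l1GasPrice = l2GasPrice = 1 gwei and
// l2GasLimit = 437,118:
//
//	l1GasLimit              = 10*4 + 0*16 + 7,440      = 7,480
//	roundedL2GasLimit       = ceilmod(437,118, 10,000) = 440,000
//	sum                     = 1e9*7,480 + 1e9*440,000  = 447,480,000,000,000
//	scaled                  = sum / 10,000,000         = 44,748,000
//	rounded                 = ceilmod(scaled, 10,000)  = 44,750,000
//	roundedScaledL2GasLimit = 440,000 / 10,000         = 44
//	result                  = 44,750,044
//
// and DecodeL2GasLimit(44,750,044) recovers 44 * 10,000 = 440,000.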
func EncodeTxGasLimit(data []byte, l1GasPrice, l2GasLimit, l2GasPrice *big.Int) *big.Int {
	l1GasLimit := calculateL1GasLimit(data, overhead)
	roundedL2GasLimit := Ceilmod(l2GasLimit, BigTenThousand)
	l1Fee := new(big.Int).Mul(l1GasPrice, l1GasLimit)
	l2Fee := new(big.Int).Mul(l2GasPrice, roundedL2GasLimit)
	sum := new(big.Int).Add(l1Fee, l2Fee)
	scaled := new(big.Int).Div(sum, bigFeeScalar)
	rounded := Ceilmod(scaled, BigTenThousand)
	roundedScaledL2GasLimit := new(big.Int).Div(roundedL2GasLimit, BigTenThousand)
	result := new(big.Int).Add(rounded, roundedScaledL2GasLimit)
	return result
}
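// Ceilmod rounds a up to the nearest multiple of b. If a is already a
// multiple of b, a is returned unchanged.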
func Ceilmod(a, b *big.Int) *big.Int {
	remainder := new(big.Int).Mod(a, b)
	if remainder.Cmp(common.Big0) == 0 {
		return a
	}
	sum := new(big.Int).Add(a, b)
	rounded := new(big.Int).Sub(sum, remainder)
	return rounded
}
// DecodeL2GasLimit decodes the L2 gas limit from an encoded L2 gas limit
func DecodeL2GasLimit(gasLimit *big.Int) *big.Int {
	scaled := new(big.Int).Mod(gasLimit, BigTenThousand)
	return new(big.Int).Mul(scaled, BigTenThousand)
}
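// DecodeL2GasLimitU64 is the uint64 equivalent of DecodeL2GasLimit.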
func DecodeL2GasLimitU64(gasLimit uint64) uint64 {
	scaled := gasLimit % tenThousand
	return scaled * tenThousand
}
// calculateL1GasLimit computes the L1 gasLimit based on the calldata and
// constant sized overhead. The overhead can be decreased as the cost of the
// batch submission goes down via contract optimizations. This will not overflow
// under standard network conditions.
func calculateL1GasLimit(data []byte, overhead uint64) *big.Int {
	zeroes, ones := zeroesAndOnes(data)
	zeroesCost := zeroes * params.TxDataZeroGas
	onesCost := ones * params.TxDataNonZeroGasEIP2028
	gasLimit := zeroesCost + onesCost + overhead
	return new(big.Int).SetUint64(gasLimit)
}
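// zeroesAndOnes counts the number of zero bytes and non-zero bytes in the
// calldata.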
func zeroesAndOnes(data []byte) (uint64, uint64) {
	var zeroes uint64
	var ones uint64
	for _, byt := range data {
		if byt == 0 {
			zeroes++
		} else {
			ones++
		}
	}
	return zeroes, ones
}
package fees
import (
	"math/big"
	"testing"

	"github.com/ethereum/go-ethereum/params"
)
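// l1GasLimitTests exercises calculateL1GasLimit with empty, zero-byte and
// non-zero-byte calldata.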
var l1GasLimitTests = map[string]struct {
	data     []byte
	overhead uint64
	expect   *big.Int
}{
	"simple":          {[]byte{}, 0, big.NewInt(0)},
	"simple-overhead": {[]byte{}, 10, big.NewInt(10)},
	"zeros":           {[]byte{0x00, 0x00, 0x00, 0x00}, 10, big.NewInt(26)},
	"ones":            {[]byte{0x01, 0x02, 0x03, 0x04}, 200, big.NewInt(16*4 + 200)},
}
func TestL1GasLimit(t *testing.T) {
	for name, tt := range l1GasLimitTests {
		t.Run(name, func(t *testing.T) {
			got := calculateL1GasLimit(tt.data, tt.overhead)
			if got.Cmp(tt.expect) != 0 {
				t.Fatal("Calculated gas limit does not match")
			}
		})
	}
}
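// feeTests checks that DecodeL2GasLimit(EncodeTxGasLimit(...)) round-trips to
// the rounded L2 gas limit across a range of gas prices and limits.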
var feeTests = map[string]struct {
	dataLen    int
	l1GasPrice uint64
	l2GasLimit uint64
	l2GasPrice uint64
}{
	"simple": {
		dataLen:    10,
		l1GasPrice: params.GWei,
		l2GasLimit: 437118,
		l2GasPrice: params.GWei,
	},
	"zero-l2-gasprice": {
		dataLen:    10,
		l1GasPrice: params.GWei,
		l2GasLimit: 196205,
		l2GasPrice: 0,
	},
	"one-l2-gasprice": {
		dataLen:    10,
		l1GasPrice: params.GWei,
		l2GasLimit: 196205,
		l2GasPrice: 1,
	},
	"zero-l1-gasprice": {
		dataLen:    10,
		l1GasPrice: 0,
		l2GasLimit: 196205,
		l2GasPrice: params.GWei,
	},
	"one-l1-gasprice": {
		dataLen:    10,
		l1GasPrice: 1,
		l2GasLimit: 23255,
		l2GasPrice: params.GWei,
	},
	"zero-gasprices": {
		dataLen:    10,
		l1GasPrice: 0,
		l2GasLimit: 23255,
		l2GasPrice: 0,
	},
	"max-gaslimit": {
		dataLen:    10,
		l1GasPrice: params.GWei,
		l2GasLimit: 99_970_000,
		l2GasPrice: params.GWei,
	},
	"larger-divisor": {
		dataLen:    10,
		l1GasPrice: 0,
		l2GasLimit: 10,
		l2GasPrice: 0,
	},
}
func TestCalculateRollupFee(t *testing.T) {
	for name, tt := range feeTests {
		t.Run(name, func(t *testing.T) {
			data := make([]byte, tt.dataLen)
			l1GasPrice := new(big.Int).SetUint64(tt.l1GasPrice)
			l2GasLimit := new(big.Int).SetUint64(tt.l2GasLimit)
			l2GasPrice := new(big.Int).SetUint64(tt.l2GasPrice)
			fee := EncodeTxGasLimit(data, l1GasPrice, l2GasLimit, l2GasPrice)
			decodedGasLimit := DecodeL2GasLimit(fee)
			roundedL2GasLimit := Ceilmod(l2GasLimit, BigTenThousand)
			if roundedL2GasLimit.Cmp(decodedGasLimit) != 0 {
				t.Errorf("rollup fee check failed: expected %d, got %d", roundedL2GasLimit, decodedGasLimit)
			}
		})
	}
}
module github.com/ethereum-optimism/optimism/go/utils
go 1.15
require github.com/ethereum/go-ethereum v1.9.10
......@@ -26,6 +26,7 @@
"lint:check": "yarn lerna run lint:check",
"lint:fix": "yarn lerna run lint:fix",
"postinstall": "patch-package",
"ready": "yarn lint && yarn test",
"release": "yarn build && yarn changeset publish"
},
"dependencies": {
......
......@@ -143,15 +143,16 @@ export abstract class BatchSubmitter {
protected _shouldSubmitBatch(batchSizeInBytes: number): boolean {
const currentTimestamp = Date.now()
const isTimeoutReached =
this.lastBatchSubmissionTimestamp + this.maxBatchSubmissionTime <=
currentTimestamp
if (batchSizeInBytes < this.minTxSize) {
if (!isTimeoutReached) {
const timeSinceLastSubmission =
currentTimestamp - this.lastBatchSubmissionTimestamp
if (timeSinceLastSubmission < this.maxBatchSubmissionTime) {
this.logger.info(
'Skipping batch submission. Batch too small & max submission timeout not reached.',
{
batchSizeInBytes,
timeSinceLastSubmission,
maxBatchSubmissionTime: this.maxBatchSubmissionTime,
minTxSize: this.minTxSize,
lastBatchSubmissionTimestamp: this.lastBatchSubmissionTimestamp,
currentTimestamp,
......@@ -161,6 +162,8 @@ export abstract class BatchSubmitter {
}
this.logger.info('Timeout reached, proceeding with batch submission.', {
batchSizeInBytes,
timeSinceLastSubmission,
maxBatchSubmissionTime: this.maxBatchSubmissionTime,
lastBatchSubmissionTimestamp: this.lastBatchSubmissionTimestamp,
currentTimestamp,
})
......
......@@ -33,7 +33,7 @@ export class Watcher {
l2ToL1MsgHash: string,
pollForPending: boolean = true
): Promise<TransactionReceipt> {
return this.getTransactionReceipt(this.l2, l2ToL1MsgHash, pollForPending)
return this.getTransactionReceipt(this.l1, l2ToL1MsgHash, pollForPending)
}
public async getL2TransactionReceipt(
......@@ -73,22 +73,31 @@ export class Watcher {
msgHash: string,
pollForPending: boolean = true
): Promise<TransactionReceipt> {
let matches: ethers.providers.Log[] = []
// scan for transaction with specified message
while (matches.length === 0) {
const blockNumber = await layer.provider.getBlockNumber()
const startingBlock = Math.max(blockNumber - this.NUM_BLOCKS_TO_FETCH, 0)
const successFilter = {
const successFilter: ethers.providers.Filter = {
address: layer.messengerAddress,
topics: [ethers.utils.id(`RelayedMessage(bytes32)`)],
fromBlock: startingBlock,
fromBlock: startingBlock
}
const failureFilter = {
const failureFilter: ethers.providers.Filter = {
address: layer.messengerAddress,
topics: [ethers.utils.id(`FailedRelayedMessage(bytes32)`)],
fromBlock: startingBlock,
fromBlock: startingBlock
}
const successLogs = await layer.provider.getLogs(successFilter)
const failureLogs = await layer.provider.getLogs(failureFilter)
const logs = successLogs.concat(failureLogs)
const matches = logs.filter((log: any) => log.data === msgHash)
matches = logs.filter((log: ethers.providers.Log) => log.data === msgHash)
// exit loop after first iteration if not polling
if (!pollForPending) {
break
}
}
// Message was relayed in the past
if (matches.length > 0) {
......@@ -98,30 +107,8 @@ export class Watcher {
)
}
return layer.provider.getTransactionReceipt(matches[0].transactionHash)
}
if (!pollForPending) {
} else {
return Promise.resolve(undefined)
}
// Message has yet to be relayed, poll until it is found
return new Promise(async (resolve, reject) => {
const handleEvent = async (log: any) => {
if (log.data === msgHash) {
try {
const txReceipt = await layer.provider.getTransactionReceipt(
log.transactionHash
)
layer.provider.off(successFilter)
layer.provider.off(failureFilter)
resolve(txReceipt)
} catch (e) {
reject(e)
}
}
}
layer.provider.on(successFilter, handleEvent)
layer.provider.on(failureFilter, handleEvent)
})
}
}
......@@ -69,6 +69,7 @@
"mocha": "^8.3.2",
"pino-pretty": "^4.7.1",
"prettier": "^2.2.1",
"prom-client": "^13.1.0",
"rimraf": "^3.0.2",
"ts-node": "^9.1.1",
"typescript": "^4.2.3"
......
/* Imports: External */
import { fromHexString, EventArgsAddressSet } from '@eth-optimism/core-utils'
import { BaseService } from '@eth-optimism/common-ts'
import { BaseService, Metrics } from '@eth-optimism/common-ts'
import { JsonRpcProvider } from '@ethersproject/providers'
import { LevelUp } from 'levelup'
import { ethers, constants } from 'ethers'
import { Gauge } from 'prom-client'
/* Imports: Internal */
import { TransportDB } from '../../db/transport-db'
......@@ -21,9 +22,25 @@ import { handleEventsStateBatchAppended } from './handlers/state-batch-appended'
import { L1DataTransportServiceOptions } from '../main/service'
import { MissingElementError, EventName } from './handlers/errors'
interface L1IngestionMetrics {
highestSyncedL1Block: Gauge<string>
}
const registerMetrics = ({
client,
registry,
}: Metrics): L1IngestionMetrics => ({
highestSyncedL1Block: new client.Gauge({
name: 'data_transport_layer_highest_synced_l1_block',
help: 'Highest Synced L1 Block Number',
registers: [registry],
}),
})
export interface L1IngestionServiceOptions
extends L1DataTransportServiceOptions {
db: LevelUp
metrics: Metrics
}
const optionSettings = {
......@@ -64,6 +81,8 @@ export class L1IngestionService extends BaseService<L1IngestionServiceOptions> {
super('L1_Ingestion_Service', options, optionSettings)
}
private l1IngestionMetrics: L1IngestionMetrics
private state: {
db: TransportDB
contracts: OptimismContracts
......@@ -74,6 +93,8 @@ export class L1IngestionService extends BaseService<L1IngestionServiceOptions> {
protected async _init(): Promise<void> {
this.state.db = new TransportDB(this.options.db)
this.l1IngestionMetrics = registerMetrics(this.metrics)
this.state.l1RpcProvider =
typeof this.options.l1RpcProvider === 'string'
? new JsonRpcProvider(this.options.l1RpcProvider)
......@@ -197,6 +218,8 @@ export class L1IngestionService extends BaseService<L1IngestionServiceOptions> {
await this.state.db.setHighestSyncedL1Block(targetL1Block)
this.l1IngestionMetrics.highestSyncedL1Block.set(targetL1Block)
if (
currentL1Block - highestSyncedL1Block <
this.options.logsPerPollingInterval
......@@ -238,6 +261,10 @@ export class L1IngestionService extends BaseService<L1IngestionServiceOptions> {
lastGoodElement.blockNumber
)
this.l1IngestionMetrics.highestSyncedL1Block.set(
lastGoodElement.blockNumber
)
// Something we should be keeping track of.
this.logger.warn('recovering from a missing event', {
eventName,
......@@ -385,7 +412,7 @@ export class L1IngestionService extends BaseService<L1IngestionServiceOptions> {
for (let i = 0; i < currentL1Block; i += 1000000) {
const events = await this.state.contracts.Lib_AddressManager.queryFilter(
this.state.contracts.Lib_AddressManager.filters.AddressSet(),
this.state.contracts.Lib_AddressManager.filters.OwnershipTransferred(),
i,
Math.min(i + 1000000, currentL1Block)
)
......
/* Imports: External */
import { BaseService } from '@eth-optimism/common-ts'
import { BaseService, Metrics } from '@eth-optimism/common-ts'
import { JsonRpcProvider } from '@ethersproject/providers'
import { BigNumber } from 'ethers'
import { LevelUp } from 'levelup'
import axios from 'axios'
import bfj from 'bfj'
import { Gauge } from 'prom-client'
/* Imports: Internal */
import { TransportDB } from '../../db/transport-db'
......@@ -12,6 +13,21 @@ import { sleep, toRpcHexString, validators } from '../../utils'
import { L1DataTransportServiceOptions } from '../main/service'
import { handleSequencerBlock } from './handlers/transaction'
interface L2IngestionMetrics {
highestSyncedL2Block: Gauge<string>
}
const registerMetrics = ({
client,
registry,
}: Metrics): L2IngestionMetrics => ({
highestSyncedL2Block: new client.Gauge({
name: 'data_transport_layer_highest_synced_l2_block',
help: 'Highest Synced L2 Block Number',
registers: [registry],
}),
})
export interface L2IngestionServiceOptions
extends L1DataTransportServiceOptions {
db: LevelUp
......@@ -52,6 +68,8 @@ export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
super('L2_Ingestion_Service', options, optionSettings)
}
private l2IngestionMetrics: L2IngestionMetrics
private state: {
db: TransportDB
l2RpcProvider: JsonRpcProvider
......@@ -64,6 +82,8 @@ export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
)
}
this.l2IngestionMetrics = registerMetrics(this.metrics)
this.state.db = new TransportDB(this.options.db)
this.state.l2RpcProvider =
......@@ -113,6 +133,8 @@ export class L2IngestionService extends BaseService<L2IngestionServiceOptions> {
await this.state.db.setHighestSyncedUnconfirmedBlock(targetL2Block)
this.l2IngestionMetrics.highestSyncedL2Block.set(targetL2Block)
if (
currentL2Block - highestSyncedL2BlockNumber <
this.options.transactionsPerPollingInterval
......
/* Imports: External */
import { BaseService, Logger } from '@eth-optimism/common-ts'
import { BaseService, Logger, Metrics } from '@eth-optimism/common-ts'
import { LevelUp } from 'levelup'
import level from 'level'
......@@ -31,7 +31,6 @@ export interface L1DataTransportServiceOptions {
useSentry?: boolean
sentryDsn?: string
sentryTraceRate?: number
enableMetrics?: boolean
defaultBackend: string
}
......@@ -65,8 +64,18 @@ export class L1DataTransportService extends BaseService<L1DataTransportServiceOp
this.state.db = level(this.options.dbPath)
await this.state.db.open()
const metrics = new Metrics({
labels: {
environment: this.options.nodeEnv,
network: this.options.ethNetworkName,
release: this.options.release,
service: this.name,
}
})
this.state.l1TransportServer = new L1TransportServer({
...this.options,
metrics,
db: this.state.db,
})
......@@ -74,6 +83,7 @@ export class L1DataTransportService extends BaseService<L1DataTransportServiceOp
if (this.options.syncFromL1) {
this.state.l1IngestionService = new L1IngestionService({
...this.options,
metrics,
db: this.state.db,
})
}
......@@ -82,6 +92,7 @@ export class L1DataTransportService extends BaseService<L1DataTransportServiceOp
if (this.options.syncFromL2) {
this.state.l2IngestionService = new L2IngestionService({
...(this.options as any), // TODO: Correct thing to do here is to assert this type.
metrics,
db: this.state.db,
})
}
......
......@@ -49,7 +49,6 @@ type ethNetwork = 'mainnet' | 'kovan' | 'goerli'
useSentry: config.bool('use-sentry', false),
sentryDsn: config.str('sentry-dsn'),
sentryTraceRate: config.ufloat('sentry-trace-rate', 0.05),
enableMetrics: config.bool('enable-metrics', false),
})
await service.start()
......
......@@ -2,6 +2,7 @@
import { BaseService, Logger, Metrics } from '@eth-optimism/common-ts'
import express, { Request, Response } from 'express'
import promBundle from 'express-prom-bundle'
import { Gauge } from 'prom-client'
import cors from 'cors'
import { BigNumber } from 'ethers'
import { JsonRpcProvider } from '@ethersproject/providers'
......@@ -27,6 +28,7 @@ import { L1DataTransportServiceOptions } from '../main/service'
export interface L1TransportServerOptions
extends L1DataTransportServiceOptions {
db: LevelUp
metrics: Metrics
}
const optionSettings = {
......@@ -106,14 +108,26 @@ export class L1TransportServer extends BaseService<L1TransportServerOptions> {
private _initializeApp() {
// TODO: Maybe pass this in as a parameter instead of creating it here?
this.state.app = express()
if (this.options.useSentry) {
this._initSentry()
}
if (this.options.enableMetrics) {
this._initMetrics()
}
this.state.app.use(cors())
// Add prometheus middleware to express BEFORE route registering
this.state.app.use(
// This also serves metrics on port 3000 at /metrics
promBundle({
// Provide metrics registry that other metrics uses
promRegistry: this.metrics.registry,
includeMethod: true,
includePath: true,
})
)
this._registerAllRoutes()
// Sentry error handling must be after all controllers
// and before other error middleware
if (this.options.useSentry) {
......@@ -148,25 +162,6 @@ export class L1TransportServer extends BaseService<L1TransportServerOptions> {
this.state.app.use(Sentry.Handlers.tracingHandler())
}
/**
* Initialize Prometheus metrics collection and endpoint
*/
private _initMetrics() {
this.metrics = new Metrics({
labels: {
environment: this.options.nodeEnv,
network: this.options.ethNetworkName,
release: this.options.release,
service: this.name,
},
})
const metricsMiddleware = promBundle({
includeMethod: true,
includePath: true,
})
this.state.app.use(metricsMiddleware)
}
/**
* Registers a route on the server.
*
......