Commit 98cd5879 authored by mergify[bot]'s avatar mergify[bot] Committed by GitHub

Merge branch 'develop' into jg/channel_bank_pruning

parents d8e30b15 225e1643
---
'@eth-optimism/indexer': patch
---
Delete unused flags
......@@ -22,12 +22,6 @@ var (
Required: true,
EnvVar: prefixEnvVar("BUILD_ENV"),
}
EthNetworkNameFlag = cli.StringFlag{
Name: "eth-network-name",
Usage: "Ethereum network name",
Required: true,
EnvVar: prefixEnvVar("ETH_NETWORK_NAME"),
}
ChainIDFlag = cli.StringFlag{
Name: "chain-id",
Usage: "Ethereum chain ID",
......@@ -188,7 +182,6 @@ var (
var requiredFlags = []cli.Flag{
BuildEnvFlag,
EthNetworkNameFlag,
ChainIDFlag,
L1EthRPCFlag,
L2EthRPCFlag,
......
......@@ -3,6 +3,7 @@ package genesis
import (
"encoding/json"
"errors"
"fmt"
"math/big"
"os"
"path/filepath"
......@@ -84,7 +85,7 @@ type DeployConfig struct {
func NewDeployConfig(path string) (*DeployConfig, error) {
file, err := os.ReadFile(path)
if err != nil {
return nil, err
return nil, fmt.Errorf("deploy config at %s not found: %w", path, err)
}
var config DeployConfig
......
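Since `NewDeployConfig` now wraps the underlying `os` error with `%w`, callers can still distinguish a missing file from other failures. A minimal sketch of a hypothetical caller; the import path, config path, and logging are illustrative assumptions, not part of this change:

```go
package main

import (
	"errors"
	"log"
	"os"

	// Assumed import path for the genesis package shown above.
	"github.com/ethereum-optimism/optimism/op-chain-ops/genesis"
)

func main() {
	// Hypothetical path to a deploy config JSON file.
	cfg, err := genesis.NewDeployConfig("deploy-config/devnet.json")
	if err != nil {
		// Because the error is wrapped with %w, errors.Is can still
		// detect the original "file does not exist" condition.
		if errors.Is(err, os.ErrNotExist) {
			log.Fatalf("deploy config is missing: %v", err)
		}
		log.Fatalf("failed to read deploy config: %v", err)
	}
	log.Printf("loaded deploy config: %+v", cfg)
}
```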
......@@ -84,7 +84,7 @@ func (co *ChannelOut) AddBlock(block *types.Block) error {
return err
}
// We encode to a temporary buffer to determine the encoded length to
// ensure that the total size of all RLP elements is less than MAX_RLP_BYTES_PER_CHANNEL
// ensure that the total size of all RLP elements is less than or equal to MAX_RLP_BYTES_PER_CHANNEL
var buf bytes.Buffer
if err := rlp.Encode(&buf, batch); err != nil {
return err
......
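The comment above describes encoding into a temporary buffer so the encoded length is known before the bytes are committed to the channel. A minimal, self-contained sketch of that pattern, assuming go-ethereum's `rlp` package; the `addToChannel` helper and constant name are illustrative, not the actual `ChannelOut` code:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/ethereum/go-ethereum/rlp"
)

// maxRLPBytesPerChannel mirrors MAX_RLP_BYTES_PER_CHANNEL from the specs
// (10,000,000 bytes); the constant name here is illustrative.
const maxRLPBytesPerChannel = 10_000_000

// addToChannel encodes into a temporary buffer first so the encoded length
// can be checked against the limit before the bytes join the channel.
func addToChannel(channel *bytes.Buffer, item any) error {
	var tmp bytes.Buffer
	if err := rlp.Encode(&tmp, item); err != nil {
		return err
	}
	if channel.Len()+tmp.Len() > maxRLPBytesPerChannel {
		return fmt.Errorf("adding %d bytes would exceed the %d byte limit", tmp.Len(), maxRLPBytesPerChannel)
	}
	_, err := channel.Write(tmp.Bytes())
	return err
}

func main() {
	var ch bytes.Buffer
	if err := addToChannel(&ch, "hello world"); err != nil {
		panic(err)
	}
	fmt.Println(ch.Len(), "bytes in channel") // 12 bytes: 0x8b prefix + 11-byte payload
}
```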
package derive
import (
"bytes"
"math/big"
"testing"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/rlp"
"github.com/stretchr/testify/require"
)
......@@ -25,3 +27,25 @@ func TestChannelOutAddBlock(t *testing.T) {
require.Equal(t, ErrNotDepositTx, err)
})
}
// TestRLPByteLimit ensures that the RLP stream decoder properly enforces the input limit.
// The input is decoded only if `len(input) <= inputLimit`.
func TestRLPByteLimit(t *testing.T) {
// Should succeed if `len(input) == inputLimit`
enc := []byte("\x8bhello world") // RLP encoding of the string "hello world"
in := bytes.NewBuffer(enc)
var out string
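// A limit of 12 matches the full encoding: a 1-byte prefix (0x8b) plus the 11-byte payload.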
stream := rlp.NewStream(in, 12)
err := stream.Decode(&out)
require.Nil(t, err)
require.Equal(t, out, "hello world")
// Should fail if `inputLimit == len(input) - 1`
enc = []byte("\x8bhello world") // RLP encoding of the string "hello world"
in = bytes.NewBuffer(enc)
var out2 string
stream = rlp.NewStream(in, 11)
err = stream.Decode(&out2)
require.Equal(t, err, rlp.ErrValueTooLarge)
require.Equal(t, out2, "")
}
......@@ -8,12 +8,12 @@ import { Messenger_Initializer, Reverter, CallerCaller } from "./CommonTest.t.so
contract CrossDomainMessenger_Test is Messenger_Initializer {
// Ensure that baseGas passes for the max value of _minGasLimit,
// this is about 4 Billion.
function test_baseGas() external {
function test_baseGas() external view {
L1Messenger.baseGas(hex"ff", type(uint32).max);
}
// Fuzz for other values which might cause a revert in baseGas.
function testFuzz_baseGas(uint32 _minGasLimit) external {
function testFuzz_baseGas(uint32 _minGasLimit) external view {
L1Messenger.baseGas(hex"ff", _minGasLimit);
}
}
/data/evm-messages.json
/data/slots.json
/data/evm-addresses.json
......@@ -14,6 +14,33 @@ program
.description('CLI for querying Bedrock migration data')
.version(version)
program
.command('parse-state-dump')
.description('parses state dump to json')
.option('--file <file>', 'path to state dump file')
.action(async (options) => {
const iface = getContractInterface('OVM_L2ToL1MessagePasser')
const dump = fs.readFileSync(options.file, 'utf-8')
const addrs: string[] = []
const msgs: any[] = []
for (const line of dump.split('\n')) {
if (line.startsWith('ETH')) {
addrs.push(line.split('|')[1].replace('\r', ''))
} else if (line.startsWith('MSG')) {
const msg = '0x' + line.split('|')[2].replace('\r', '')
const parsed = iface.decodeFunctionData('passMessageToL1', msg)
msgs.push({
who: line.split('|')[1],
msg: parsed._message,
})
}
}
fs.writeFileSync('./data/evm-addresses.json', JSON.stringify(addrs, null, 2))
fs.writeFileSync('./data/evm-messages.json', JSON.stringify(msgs, null, 2))
})
program
.command('evm-sent-messages')
.description('queries messages sent after the EVM upgrade')
......
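For reference, the `parse-state-dump` parser above assumes pipe-delimited dump lines: `ETH|<address>` lines contribute an address to `evm-addresses.json`, and `MSG|<sender>|<hex-encoded passMessageToL1 calldata, without the 0x prefix>` lines contribute a decoded withdrawal message to `evm-messages.json` (trailing carriage returns are stripped).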
......@@ -363,11 +363,10 @@ where:
When decompressing a channel, we limit the amount of decompressed data to `MAX_RLP_BYTES_PER_CHANNEL` (currently
10,000,000 bytes), in order to avoid "zip-bomb" types of attack (where a small compressed input decompresses to a
humongous amount of data). If the decompressed data exceeds the limit, things proceeds as thought the channel contained
only the first `MAX_RLP_BYTES_PER_CHANNEL` decompressed bytes.
When decoding batches, all batches that can be completly decoded below `MAX_RLP_BYTES_PER_CHANNEL` will be accepted
even if the size of the channel is greater than `MAX_RLP_BYTES_PER_CHANNEL`.
humongous amount of data). If the decompressed data exceeds the limit, things proceed as though the channel contained
only the first `MAX_RLP_BYTES_PER_CHANNEL` decompressed bytes. The limit is set on RLP decoding, so all batches that
can be decoded within `MAX_RLP_BYTES_PER_CHANNEL` will be accepted even if the size of the channel is greater than
`MAX_RLP_BYTES_PER_CHANNEL`. The exact requirement is that `length(input) <= MAX_RLP_BYTES_PER_CHANNEL`.
While the above pseudocode implies that all batches are known in advance, it is possible to perform streaming
compression and decompression of RLP-encoded batches. This means it is possible to start including channel frames in a
......
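To make the limit concrete, here is a minimal sketch of the decoding behaviour described above, using go-ethereum's `rlp.NewStream` input limit; `readBatches` and the plain `[]byte` batch type are illustrative stand-ins for the actual derivation code:

```go
package main

import (
	"bytes"
	"errors"
	"fmt"
	"io"

	"github.com/ethereum/go-ethereum/rlp"
)

// Illustrative stand-in for MAX_RLP_BYTES_PER_CHANNEL.
const maxRLPBytesPerChannel = 10_000_000

// readBatches decodes RLP items from the decompressed channel data until the
// data runs out or the input limit is hit, keeping every batch that decoded
// cleanly before that point.
func readBatches(decompressed io.Reader) ([][]byte, error) {
	stream := rlp.NewStream(decompressed, maxRLPBytesPerChannel)
	var batches [][]byte
	for {
		var batch []byte // a real batch is a structured type; []byte keeps the sketch small
		err := stream.Decode(&batch)
		if errors.Is(err, io.EOF) {
			return batches, nil
		}
		if err != nil {
			// Any error here (including rlp.ErrValueTooLarge when the limit
			// is hit) stops decoding; batches already decoded are kept.
			return batches, nil
		}
		batches = append(batches, batch)
	}
}

func main() {
	in := bytes.NewReader([]byte("\x8bhello world\x83foo"))
	batches, _ := readBatches(in)
	fmt.Println(len(batches)) // 2
}
```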