Commit e2dd14a0 authored by mergify[bot], committed by GitHub

Merge branch 'develop' into feat/static-peer-reconnect

parents 5262f00f 38773bb1
---
'@eth-optimism/fault-detector': minor
---
Updates the fault detector to support Bedrock networks.
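At a high level, Bedrock support changes the divergence check: instead of decoding `appendStateBatch` calldata and comparing state roots block by block, the detector recomputes the proposed output root from the target L2 block and the L2ToL1MessagePasser storage root and compares it against the `OutputProposed` event. A minimal sketch of that comparison, assuming ethers v5; the helper name and parameters are illustrative, but the field layout matches the service code later in this commit:

```ts
import { ethers } from 'ethers'

// Recompute a version-0 Bedrock output root from its three components.
// In the service these values come from eth_getBlockByNumber and eth_getProof
// against the L2 node; the parameter names here are illustrative.
const computeOutputRoot = (
  stateRoot: string, // state root of the proposed L2 block
  messagePasserStorageRoot: string, // storage root of the L2ToL1MessagePasser
  blockHash: string // hash of the same L2 block
): string =>
  ethers.utils.solidityKeccak256(
    ['uint256', 'bytes32', 'bytes32', 'bytes32'],
    [0, stateRoot, messagePasserStorageRoot, blockHash]
  )

// A mismatch against the OutputProposed event marks the chain as diverged:
// computeOutputRoot(block.stateRoot, proof.storageHash, block.hash) !== event.args.outputRoot
```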
......@@ -5,6 +5,8 @@ import (
"fmt"
"os"
"github.com/ethereum-optimism/optimism/op-bindings/predeploys"
"github.com/ethereum-optimism/optimism/op-chain-ops/crossdomain"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
......@@ -120,6 +122,9 @@ type MigrationData struct {
func (m *MigrationData) ToWithdrawals() ([]*crossdomain.LegacyWithdrawal, error) {
messages := make([]*crossdomain.LegacyWithdrawal, 0)
for _, msg := range m.OvmMessages {
if msg.Who != predeploys.L2CrossDomainMessengerAddr {
continue
}
wd, err := msg.ToLegacyWithdrawal()
if err != nil {
return nil, err
......@@ -130,6 +135,9 @@ func (m *MigrationData) ToWithdrawals() ([]*crossdomain.LegacyWithdrawal, error)
}
}
for _, msg := range m.EvmMessages {
if msg.Who != predeploys.L2CrossDomainMessengerAddr {
continue
}
wd, err := msg.ToLegacyWithdrawal()
if err != nil {
return nil, err
......
......@@ -3,7 +3,6 @@ package eth
import (
"bytes"
"fmt"
"math/big"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
......@@ -52,12 +51,16 @@ func (res *AccountResult) Verify(stateRoot common.Hash) error {
if err != nil {
return fmt.Errorf("failed to verify storage value %d with key %s (path %x) in storage trie %s: %w", i, entry.Key, path, res.StorageHash, err)
}
if !bytes.Equal(val, val) {
comparison, err := rlp.EncodeToBytes(entry.Value.ToInt().Bytes())
if err != nil {
return fmt.Errorf("failed to encode storage value %d with key %s (path %x) in storage trie %s: %w", i, entry.Key, path, res.StorageHash, err)
}
if !bytes.Equal(val, comparison) {
return fmt.Errorf("value %d in storage proof does not match proven value at key %s (path %x)", i, entry.Key, path)
}
}
accountClaimed := []any{uint64(res.Nonce), (*big.Int)(res.Balance).Bytes(), res.StorageHash, res.CodeHash}
accountClaimed := []any{uint64(res.Nonce), res.Balance.ToInt().Bytes(), res.StorageHash, res.CodeHash}
accountClaimedValue, err := rlp.EncodeToBytes(accountClaimed)
if err != nil {
return fmt.Errorf("failed to encode account from retrieved values: %w", err)
......
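The storage-proof fix above RLP-encodes the claimed value before comparing it to the value proven from the trie, because storage leaves hold the RLP encoding of the value with leading zeros stripped. A rough TypeScript equivalent of that expected-value encoding, assuming ethers v5 utilities; the function name is illustrative:

```ts
import { ethers } from 'ethers'

// Encode a claimed storage value the way it appears as a storage-trie leaf:
// minimal big-endian bytes (leading zeros stripped), then RLP-encoded.
// This mirrors rlp.EncodeToBytes(entry.Value.ToInt().Bytes()) in the Go change.
const encodeClaimedStorageValue = (claimed: ethers.BigNumber): Uint8Array => {
  const minimalBytes = ethers.utils.stripZeros(
    ethers.utils.arrayify(claimed.toHexString())
  )
  return ethers.utils.arrayify(ethers.utils.RLP.encode(minimalBytes))
}
```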
......@@ -2,42 +2,86 @@ package eth
import (
"encoding/json"
"math/big"
"testing"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/require"
)
func TestAccountResult_Verify(t *testing.T) {
// Example account result: a SystemConfig storage read, to prove that the value of slot 103 (0x67) equals some address,
// which would be RLP-encoded when retrieved as a value from a trie node.
resultData := `
// Example account result taken from the Goerli SystemConfig proxy:
// cast proof 0xAe851f927Ee40dE99aaBb7461C00f9622ab91d60 0x65a7ed542fb37fe237fdfbdd70b31598523fe5b32879e307bae27a0bd9581c08 --block 8481106
// The provided slot is the unsafe block signer.
const resultData = `
{
"accountProof": [
"0xf90211a03d0bbcace6414d254eb5ee34923da6dadda532025554251fccb0f3e401f97f64a00b0060669e7d8bee64b69cd327edd83aec3839bcdc2c51df61a87d26c06a6e6da0c0325f24cc335b26d107c0704a0c88d96aac42cb807f732d45196ba2b3ec6f4fa0be4d5280fb18316c250432acd5272e0558a850e2b17803e2262a0f12ec7293cda0584c838d31e2b8b7b1b61ea86ec96c14de6f8e48b5085ab88f26022e46c434f3a033ce2881dfbf590f36e88d999b52b5f88fa3e2845aa069f354fffc4657269db0a0bc0d89addbe9e9dd1b4570e6403b9540c631d4b018d6a30dee0d6b1416d79f7da0b208a7628b6e33e21638856ae0cf77f0245aab58300a3ea459b049fcbcf7c95aa01d16509886c5fef9deee332e82ca26739b35acfc192c4fd5f31310caf9996304a062a1114d555e0657a953a9e79156ecf6323629961afe756c846a1222c415999aa0e3ae9195242e45330d273187de63ccd74c2ab787dc50e0ff3edbc8ef47cd3b2da0c1b261a54efdfa3596cec84a23784ed051bc346b80dbf0694dc7d39356c212eea04512835bd07815d2bcc00f25ea70dee0f0a770b16d16250f7d5c884b0d440581a0ee25f14adcbd0dc55d15a2148deae12c3ab9c6a001c6e899e22d3e0890999083a01c0c47189ac3d6931aa05fbe3cd4140a1794f68b724b1804f46d2705435f08caa0d9a14e2b4e6778a2405bc7931934b5e97b8e3a2033eedb30409896420a2db44b80",
"0xf90211a0e65e35c2066e63b933bd300ad97883dbcf4e2d2df8a540aa3964f5354e2bb8dba0814e82409c1813007af146f3b47f0e0d6da4c2885de8f34f034826bcb4c4c9e7a07d726afe2a20f4928e8715d80bc5aff5fc30c05b1e953372d564cba93d3b45bda02fc8df4a480ea0402f48aa206047ef0acbedf1a7033153c60d5ec4dfb9ee30cca05ecda76647515fb8342434b5a36faec2f17fc729ea369edcf7a42cd235c51fa2a0ab51933d2bde083801bd7fedb17ec22ae5231e508c62d66e9dccbd598c1d98fda0e2af2dc4a1b87147a5e388388ee5d1aafb415498e1011e87df127cc8d57616eca00bb9b95cfda6476e321ca441f523f80cb13519e537679ad6139600b35c91b5b1a0e3aae75bdc65365e2f8bb9c8646a4118d302076e0d9de95fc4425e82680ed912a0124bb32fc404a8647d7192f438a2bca8c52877b02a921b014fe4a192982bcf9ea0da2df7e5f41d6e38cc48fd9d0ca91bc96ff6ffa336b75784b3e23cd8dfddc0b1a0f7fc4a6792cdf964423f705935f0338128d099c83a74c2b76c688ecda8f23ffea009b2347ed0222d713137ad280310fad2ca92f007b5fd3ed1b768a6b52b5040d6a07b5bfe1bbd07575f26d97b9a0e8c67cd11d251f3d8bc823ade462daee2309e4ba02d2e464a4ad06d6dca6cb54e31fb5aac85dc9c5bab420a2c7c1ca986991f4d1da0cd5f723720652dec2a8bbba3bad3f73fb16b83a31d28a707fd0e0f85370eae6380",
"0xf90151a04df2503da3b2491b68d13d2735005e451ca9cae77d336061df9250e0d3592e0ca01580374cce4fa8401d9fdeebb037e96c3f9c03873aec0637b43ba809fa8e53a9a0101d14d18ee6c9bf21f8292b155e40462605d15d315337e0f1098479d9ba6d1b80a0d0baf298792955a012fc458a97de8b3b2319911c3a82e427b5424fb544ac1ea080a09b2cc83f7e69b3c46bde63e4805d1aa9a064c0220532de547693122bba9b24458080a0a44850b8b25ced24d074182424ab7aa668b78acaa64e2da1854d04093332153b80a00b513b7944399ac7e2291ab7cdc1b99f0445256cc3f0c3b6b5c2d50eab4f9887a06b979f3c7a2a1d95af02c44d015ac42202cd0df8c19b629f6089a6a2a181d69aa0eb0216e118a1b797b90165cea439923e58bc672b81573ae44156cf38632fa36880a0e16e6a4603316f2fa41cd7d0a8fc9a8dca7ca36763abe39b99ef57b83adf501180",
"0xf8689f3b8dff764734b5790096e811fc30835b13a5bea290831c7c6722f325a5f818b846f8448080a0dd6d0b4dba95e79439c501e2109ded26989a53ca4641e1f48bf702e7361637e9a01f958654ab06a152993e7a0ae7b6dbb0d4b19265cc9337b8789fe1353bd9dc35"
],
"address": "0x6900000000000000000000000000000000000009",
"balance": "0x0",
"codeHash": "0x1f958654ab06a152993e7a0ae7b6dbb0d4b19265cc9337b8789fe1353bd9dc35",
"nonce": "0x0",
"storageHash": "0xdd6d0b4dba95e79439c501e2109ded26989a53ca4641e1f48bf702e7361637e9",
"storageProof": [
{
"key": "0x0000000000000000000000000000000000000000000000000000000000000067",
"value": "0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc",
"proof": [
"0xf8f18080a04fc5f13ab2f9ba0c2da88b0151ab0e7cf4d85d08cca45ccd923c6ab76323eb2880a0f57febb7b16455e051f412a56e54016c676a3d4aa515d2e77a90520dfe36162ea0558f72e6d0e3b401856defa90b07dd0442282592b3ca718e2dc919ea53b8e69280a04f893abcf66ae78abb4ac986ddf78bdf95a7b15079be06cc5756f78d772271eba0c1529c7d0f249fd7060e930515ac4980103920979274f56b07419bf33be4d3d7a0a055722fdc9281d825dfc17c2ae775aba5b283954f5c484fc7e0e5a148131e2ea02833bc13e1f58010a678009d7d5982b892b3ba1432ca6b06f8a849b71491b51e808080808080",
"0xf7a03787eeb91fe3101235e4a76063c7023ecb40f923f97916639c598592fa30d6ae95949965507d1a55bcc2695c58ba16fb37d819b0a4dc"
]
}
]
}
"address": "0xae851f927ee40de99aabb7461c00f9622ab91d60",
"balance": "0x0",
"codeHash": "0x1f958654ab06a152993e7a0ae7b6dbb0d4b19265cc9337b8789fe1353bd9dc35",
"nonce": "0x1",
"storageHash": "0x88219055c2fef8800e02f071d053a86a4194e70a81b6e45f1fecca7dae0432da",
"accountProof": [
"0xf90211a063a66cd84a54f8ee248662f1d4637936c430a0f455eeec8c01ee56db898dddfba0be9003fb3e36a55cfea1eda010c0a459f10729db9809e0bd1e3599f46c5ffed1a0a08d018d3cf38b0d0cbff14288699705dfa7cf27dc20fbbaae9351837eff4751a0eed877086740a930f035b75ebb26ce63df0f61baea52bf05f4c7421014debf33a053ea34e49423e790b10d9a36f498f337b3f079ed611d98a3f8550c34212dcbd7a0c370d5b874f70b9fd1c8a2fe98b0ef60c480fbe00566a7d5a5e682d9859398f2a0da820e94aac0b444a8dcfebc7dc9ec942f04f252da25b10faf50b57f969aa1f5a0413e8039c67d8acbe20993ab364c2c477d1ce85e8ae723c33acd506175ce4bffa0f70e5d5d934c53b2302ec3f98bd3f33f39a15fabb8c32e5e7acc97121d7a9cf3a0b41e7073ae943e498681b5d86941401c29b38c93fa347ace6bb15ba74ccbf45ea0a3b0aa548cac9cbbfcfabd980c1ceae8bdc39ad2682fc6e6d9cf0f4bdb273884a04d7932870a3d25163ea28ae5ebe702b841d755541d2af98c5c1c08090327fab1a06e41c3fb6362dd860a098aacf13a81c9d26e9b822c1066ca76cb98607f3e257aa0079ffe59ddb21ccd03bcbf1cc42fc0fb89dcae93ffeed9b82a848828199ab057a0dce67e92c8991df57ecac2237244d12e92f6514db1c5f076718fe40266bbf741a08dd7d3b3b041889f837217761b4e87510428ea41b3aff4e5725fd8efc2d735b980",
"0xf90211a0809683f3310d75dff5eb95296aa9ff5d74fbde9f873b9a6b245513887f9c6e91a055450f5338cc2f8f4306912e938df3fe490929614604eeea4c03581b98c8ae8ea04e50b57da8fc16a5d5460892196631737eeb1cc1e995e5c1de9c381ed1fb84d4a07d65e61a50579d689422446c23df10c4c0b5ec41239a910ca86634e2fee75320a091c77e1f72302bdb3985b249dba07d1abaa345296080c369bd84c518669297e1a019a185bedc83ab48c51dffe4c58ab88e30c88976a3b059ab524ef7ab42886d61a0a6c249e070db991141ee1289a5ed212f81673f8cd3f7bf35c27c335cc77d3eeca0c7d7a7f5036c8c3185cd0ca231775047192419b8f7e7b5a462c8e713ab2f4fcda006084fdd6777d076850defc5c6f1336535bbc2ec95a0e3f91fc5ac9761aee770a0c85a82f527990667217fac36ebfb9f4af29a6ff7b0b3d41cdcb256a26ca5f621a06a382d1f5a9bb0b712c89e82b0aaf26cf7c5984255377fd7428457d390330d40a0194f1f730e71559662ea2d9bdc681761eaf54decc7041766b5d7b7e8086d2480a05afe23c9ec57c22d9639f9228aa389e7a70a4e1e3e675856792f4a92fe284478a05bcacd2d3d2ac267d5b0367b56f05e4c808e2a5ecd04a10f1399e313fd41b273a09e62b6f5b7b77a1657ded9f0bef2af7fee11f2bf0518a5cceb5ceae2845c16f0a06d0ee25c5a3acd2b8d3253b856a77187b76f90d60b2356fc77f6e79766410cc580",
"0xf90211a0a6b81aae9b8aff6ac275885f6dfa4bc11949e3e8cbfad05714c3233303fa83f5a0e29595c647574b219c3068a768d47347b0e8a272da881aeb4525af051faab847a0441c1549c250c0c1bc0fa1b73e9f9ac9998b5dcef65a57ecd3f748ce02be4251a0353bd042ac0cf9a90a9cc02cc131f5d58f531df8df7ab752f6caa9b6807a506ea07340f489ba55fc8cfde61384c4990f74034f0bc0c7e1d68733284cb5c30d5bbea00ff5d4191ef973be9ae73b3fd9d01f52b54aafa20f147b6a5ca6b9e56a1f9ec4a0e167cd5a249a0dc2afbb9b2aafbd3b6e0160739a99e482d22d722c78fa296772a004202f2695770715d36e9aad418cc005fd8b22b927f1e1383b4e95ca18f41f61a0be38b6340286e0cd2454d90d8ed2f7e26bce5b7774f8adfa8f54a75bc4635d18a0cacc635e487a0d7dd19373bcd0a32e4cea0655f93d61f2940a6063059a044bf7a0bcd8f9ab88356e86cea7cd27454525ade016bccf26f414ad9fa93e0280d40df4a0d5651902739f9dfaff0f1178ea7cba617087234dd0e2895424961fad98605a27a0f76890befb5b3b20695d64b6a7c416709c93032012b46245c5bc00dd104b84f3a00ff372b11e0fb8febd467e060f7ce126e705a07a203a3f6dd93c7e3f36f4608ea0b4ea8133548c9b9d8f62b86aa703f65e3323a92a4b4711f80a734b80814b0825a04db29c4cb760e4831bfe40cdb0f554d74e98da26715c7e6319317c8c9a9c247580",
"0xf90211a026ffcc82ed6e3cd13ea30ed185afae29eed7f7fbde7f46010061791b5441b7dfa086b3018a2c001ffd6cc76e58372c49f5a2ba42335789fdcea878d93ceeeeb969a0589ba5e683afa655b17eb6b6c687a657669f772b1a2f78813ea662e8c316c12ea01c604e2e2f9ace5ef281f09c4b6c24c4c4631810f30b5209a433515a628cb5aca0520abee45bbc79e9f9519ffd4ad199b40383cb9718a3e8392d7193f68b1bc251a0b788e74186f121dd5ad31ef6b69d69147ab1841aa5380928fbe11a65ad67af36a0ef80a7fd5edf9901e2d8fa0cd8d9608e9fde114da1bd0f545e107c6771d5b0e7a05e8d9b24b83dbb8ec946cd42ff04bd0588f15866cd95095a8495242616b9ae71a0d623ee5bd0f3b8513ad7c247d1736841878f7210445209cecf36f0bfa5b8a6b9a03d0b62b3dc96b9c72190ff3484699d4892dea93cd16d9811cd58bd614348db11a0b140f98169be15dc1266be9343a1225fe6339f86e309854b03af9d304e75bd76a04ca100367dd9f12a6e80f48a1fabc19d9d36f07960d1911c3a09199a43eb26d2a05e9c627adafc5393a9b5ddc910f6474c56a10366f9d44248d9c0ce2e0c6b9a94a097e533731c36c43d7cf20379f2349ac1cd7a1165fb3588432be8d315801b2e80a0765168ad98f52483060045ae5208451078b2e6876a6f90d40a5c3e3f31cc559ba0479dd4f67d939fa21dd0528703a68c933f8a3d8e504d48f8c9bf7c41e92deecd80",
"0xf90211a04232cef0e6c4bbd5969f864233a23762543460900e04868931685e0148ae2d10a05353ae18ba63650d7281fefa6fb545b7314cadafd459eed25c7db4915d834e95a022fe8bbf3b304ea8fa6e0cb69c9a3a05cdcf0c3542a5e389a9518177a1925bdca0377ac9d4284000e1f98327783989043f4a6b59d48f5a80579c71adfd880f651ea049da166e0ceb03cf24a2cc03b3bd5e862eddd540a2c517493125322b3a30e85ba0aa9980b3bf84ce0b360f10ca3b230b5dbc9eecba684ed1add96b23167728574ea0f28a3be0e42f13e78f306970fd3a1aac286b30af8af1f460e50eba1d879d61b8a0c84f2fd48976ee7662adc809abb439ea056b3615b622f2938b597782501a4279a0ca13452ffbe75eedde1d870340997ce269c83f6642eefa2d4e9d6bd21c8fc838a0dd918c25e25823548a6a31edb27b65421b2b77063cdc71b13c43eed15b86b924a01a4d8ab05ce030242b59014d96fe1adca52c3f5d13eb09feefbf6eaf97e6fcfba09187e247644a19fe62860dba6e2317f40fe9907c8101bf9e1b04e4b5dadb8ec4a02c299cdc9b87c7f3b1402627f9bcc488d8655a6cbc5d458155024dc8be90ea7aa0373f215d7bc10a74a8e11ddbd3395e27d55cfab62a433b2c6961c1beee9ff3c8a04ec09787d6040119700a0d38154d4a589e1d62245fcd685768cd265cda5ee576a00086a240676e913c0b969397fbc72191719834bc533ba4601406ea062ea76f9b80",
"0xf90151808080a0ae1018f6569474784bbb933125e397f72f160cb86bf9528ba522e2957e6b27b6a07e10da74c2d11b8dda5b0127b4b39a0d7a1f4a1c9f0dc1a05ae1f3fa3346c86ba0884fa49d5faae435667fe982950ccf82aa58a148dffdb99c5eb7da6b01fd9b00a0065e97ea5d45a492c2aa8eade7534551a04e7899f0bcebeeccc42a1cb2292ce3a0c3a2aae48ed7395cc59065eedd5cb40d9a0cb02db9a9afaccd27efd6282464eb808080a0fc9e1fdc7239d8adc047265bb6589ddefac9a63c1c9829ef2b4717a4b9000dd7a0c285558e316f3ea0ceb2ca5681a79e5d3e3d6d6f21054d5056a6e9ad7dcdd6c7a0de8e2f7f5743997eabe69cb1d99ef0aec670da0b31b466bd8e14d24df17542d6a026ad23a1ed5a6f66a4e6e64fa1b3c37c0878975ba0b8872f5d8ae7c215a0f9c5a0f0ac72c6fc609e78ca13cefea04ef39ff7c9c49198a641508bf7d51bc997239180",
"0xf851808080808080a0292e7aa7b0fa371f45a26562a180d952f2f3bd3d7a67eb019747b10876cd61a6a0c7f2b75df52f531ca04c4b7c6449bb8be8eae52bf543dfb78383eda4625d922e808080808080808080",
"0xf8669d37118893aaaf73153bacee2bbd50b8234ab255361cc8614a5713b77282b846f8440180a088219055c2fef8800e02f071d053a86a4194e70a81b6e45f1fecca7dae0432daa01f958654ab06a152993e7a0ae7b6dbb0d4b19265cc9337b8789fe1353bd9dc35"
],
"storageProof": [
{
"key": "0x65a7ed542fb37fe237fdfbdd70b31598523fe5b32879e307bae27a0bd9581c08",
"proof": [
"0xf901118080a04fc5f13ab2f9ba0c2da88b0151ab0e7cf4d85d08cca45ccd923c6ab76323eb28a09d1f77882a1c2e804de950478b4fdec793decb817e7bbe24a2afd23eb000d648a0f57febb7b16455e051f412a56e54016c676a3d4aa515d2e77a90520dfe36162ea0dce964c738816bb26d659513b793496cac2279d100812e6441aae3f7ffefce2080a0d5223d0cc181c8c0cd1babb8cd0b4d6433eab19a9fcc7836681589aad346556fa0c61ebce1cecbc190ee1163d0ff9ff456cb1fe3409dc546bf2f9118662e6db892a024513ee2bee3b30d4b4e4b600b5a98db38db03f6db556f492d24ac0ff9d6c98fa019bbead828fb8baf57dfda3a30a0b6da048e31faee39f5a76a99b51f28c6c512808080808080",
"0xf7a031a88f3936348d602f3078126bdcd162c575cb17fb9bbfe2dab00b167bd295c39594715b7219d986641df9efd9c7ef01218d528e19ec"
],
"value": "0x715b7219d986641df9efd9c7ef01218d528e19ec"
}
]
}
`
var goodRoot = common.HexToHash("0x070ef87d6d3a8a132dfb45cbbc86daf545a45f1a0263bd28a304e465327f3557")
func TestAccountResult_Verify(t *testing.T) {
result := makeResult(t)
require.NoError(t, result.Verify(goodRoot), "verifies against good state root")
require.NotNil(t, result.Verify(common.HexToHash("0x070ef87d6d3a8a132dfb45cbbc86daf545a45f1a0263bd28a304e465327f3558")), "does not verify against other state root")
result = makeResult(t)
result.StorageProof[0].Proof[0][0] = 0x00
require.NotNil(t, result.Verify(goodRoot), "does not verify against bad proof")
result = makeResult(t)
result.AccountProof[0][0] = 0x00
require.NotNil(t, result.Verify(goodRoot), "does not verify against bad proof")
}
func FuzzAccountResult_StorageProof(f *testing.F) {
f.Fuzz(func(t *testing.T, key []byte, value []byte) {
result := makeResult(t)
result.StorageProof[0].Key = common.BytesToHash(key)
result.StorageProof[0].Value = hexutil.Big(*(new(big.Int).SetBytes(value)))
require.NotNil(t, result.Verify(goodRoot), "does not verify against bad proof")
})
}
func FuzzAccountResult_AccountProof(f *testing.F) {
f.Fuzz(func(t *testing.T, address []byte, balance []byte, codeHash []byte, nonce uint64, storageHash []byte) {
result := makeResult(t)
result.Address = common.BytesToAddress(address)
bal := hexutil.Big(*new(big.Int).SetBytes(balance))
result.Balance = &bal
result.CodeHash = common.BytesToHash(codeHash)
result.Nonce = hexutil.Uint64(nonce)
result.StorageHash = common.BytesToHash(storageHash)
require.NotNil(t, result.Verify(goodRoot), "does not verify against bad account proof")
})
}
func makeResult(t *testing.T) AccountResult {
var result AccountResult
require.NoError(t, json.Unmarshal([]byte(resultData), &result))
require.NoError(t, result.Verify(common.HexToHash("0xb3a98a923c23cf25cbe04485f55243b37b29b7e12760bd24368ace23bf370e7a")), "verifies against good state root")
require.NotNil(t, result.Verify(common.HexToHash("0xb3a98a923c23cf25cbe04485f55243b37b29b7e12760bd24368ace23bf370e7b")), "does not verify against other state root")
return result
}
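The fixture above was captured with the `cast proof` command quoted in the test comment. The same data can be pulled over raw JSON-RPC with `eth_getProof`; a sketch assuming an ethers v5 JsonRpcProvider pointed at a Goerli archive node (the RPC URL is a placeholder; address, slot, and block number match the fixture):

```ts
import { ethers } from 'ethers'

// Fetch the same account/storage proof that the `cast proof` command returns.
// The RPC URL is a placeholder; the other values match the test fixture.
const provider = new ethers.providers.JsonRpcProvider('https://goerli-archive.example')

const fetchFixtureProof = async () =>
  provider.send('eth_getProof', [
    '0xAe851f927Ee40dE99aaBb7461C00f9622ab91d60',
    ['0x65a7ed542fb37fe237fdfbdd70b31598523fe5b32879e307bae27a0bd9581c08'],
    ethers.utils.hexValue(8481106),
  ])
```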
ignores: [
"@babel/eslint-parser",
"@types/level",
"@typescript-eslint/parser",
"eslint-plugin-import",
"eslint-plugin-unicorn",
......
......@@ -52,7 +52,7 @@
"ethers": "^5.7.0",
"express": "^4.17.1",
"express-prom-bundle": "^6.3.6",
"level": "^6.0.1",
"level6": "npm:level@^6.0.1",
"levelup": "^4.4.0"
},
"devDependencies": {
......
/* Imports: External */
import { BaseService, LegacyMetrics } from '@eth-optimism/common-ts'
import { LevelUp } from 'levelup'
import level from 'level'
import level from 'level6'
import { Counter } from 'prom-client'
/* Imports: Internal */
......
import { Contract } from 'ethers'
import { Contract, BigNumber } from 'ethers'
export interface OutputOracle<TSubmissionEventArgs> {
contract: Contract
filter: any
getTotalElements: () => Promise<BigNumber>
getEventIndex: (args: TSubmissionEventArgs) => BigNumber
}
/**
* Partial event interface, meant to reduce the size of the event cache to avoid
......@@ -41,27 +48,32 @@ const getCache = (
}
/**
* Updates the event cache for the SCC.
* Updates the event cache for a contract and event.
*
* @param scc The State Commitment Chain contract.
* @param contract Contract to update cache for.
* @param filter Event filter to use.
*/
export const updateStateBatchEventCache = async (
scc: Contract
export const updateOracleCache = async <TSubmissionEventArgs>(
oracle: OutputOracle<TSubmissionEventArgs>
): Promise<void> => {
const cache = getCache(scc.address)
const cache = getCache(oracle.contract.address)
let currentBlock = cache.highestBlock
const endingBlock = await scc.provider.getBlockNumber()
const endingBlock = await oracle.contract.provider.getBlockNumber()
let step = endingBlock - currentBlock
let failures = 0
while (currentBlock < endingBlock) {
try {
const events = await scc.queryFilter(
scc.filters.StateBatchAppended(),
const events = await oracle.contract.queryFilter(
oracle.filter,
currentBlock,
currentBlock + step
)
// Throw the events into the cache.
for (const event of events) {
cache.eventCache[event.args._batchIndex.toNumber()] = {
cache.eventCache[
oracle.getEventIndex(event.args as TSubmissionEventArgs).toNumber()
] = {
blockNumber: event.blockNumber,
transactionHash: event.transactionHash,
args: event.args,
......@@ -97,15 +109,15 @@ export const updateStateBatchEventCache = async (
/**
* Finds the Event that corresponds to a given state batch by index.
*
* @param scc StateCommitmentChain contract.
* @param oracle Output oracle contract
* @param index State batch index to search for.
* @returns Event corresponding to the batch.
*/
export const findEventForStateBatch = async (
scc: Contract,
export const findEventForStateBatch = async <TSubmissionEventArgs>(
oracle: OutputOracle<TSubmissionEventArgs>,
index: number
): Promise<PartialEvent> => {
const cache = getCache(scc.address)
const cache = getCache(oracle.contract.address)
// Try to find the event in cache first.
if (cache.eventCache[index]) {
......@@ -113,7 +125,7 @@ export const findEventForStateBatch = async (
}
// Update the event cache if we don't have the event.
await updateStateBatchEventCache(scc)
await updateOracleCache(oracle)
// Event better be in cache now!
if (cache.eventCache[index] === undefined) {
......@@ -126,23 +138,23 @@ export const findEventForStateBatch = async (
/**
* Finds the first state batch index that has not yet passed the fault proof window.
*
* @param scc StateCommitmentChain contract.
* @param oracle Output oracle contract.
* @returns Starting state root batch index.
*/
export const findFirstUnfinalizedStateBatchIndex = async (
scc: Contract
export const findFirstUnfinalizedStateBatchIndex = async <TSubmissionEventArgs>(
oracle: OutputOracle<TSubmissionEventArgs>,
fpw: number
): Promise<number> => {
const fpw = (await scc.FRAUD_PROOF_WINDOW()).toNumber()
const latestBlock = await scc.provider.getBlock('latest')
const totalBatches = (await scc.getTotalBatches()).toNumber()
const latestBlock = await oracle.contract.provider.getBlock('latest')
const totalBatches = (await oracle.getTotalElements()).toNumber()
// Perform a binary search to find the next batch that will pass the challenge period.
let lo = 0
let hi = totalBatches
while (lo !== hi) {
const mid = Math.floor((lo + hi) / 2)
const event = await findEventForStateBatch(scc, mid)
const block = await scc.provider.getBlock(event.blockNumber)
const event = await findEventForStateBatch(oracle, mid)
const block = await oracle.contract.provider.getBlock(event.blockNumber)
if (block.timestamp + fpw < latestBlock.timestamp) {
lo = mid + 1
......
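The helpers now take an `OutputOracle` wrapper rather than a bare StateCommitmentChain contract, and the fraud-proof window is passed in instead of being read inside the helper. A minimal usage sketch of the new signatures, mirroring how the service wires up the legacy oracle later in this commit; `scc` is assumed to be the SDK's StateCommitmentChain contract instance and the import path assumes a file in the same src directory:

```ts
import { Contract } from 'ethers'
import {
  OutputOracle,
  findEventForStateBatch,
  findFirstUnfinalizedStateBatchIndex,
} from './helpers'

// Wrap the legacy StateCommitmentChain behind the OutputOracle interface.
// `scc` is assumed to be messenger.contracts.l1.StateCommitmentChain.
const makeLegacyOracle = (scc: Contract): OutputOracle<any> => ({
  contract: scc,
  filter: scc.filters.StateBatchAppended(),
  getTotalElements: async () => scc.getTotalBatches(),
  getEventIndex: (args: any) => args._batchIndex,
})

const checkOldestUnfinalized = async (scc: Contract, fpw: number) => {
  const oracle = makeLegacyOracle(scc)
  // The challenge window (fpw) is now passed explicitly instead of being
  // read from FRAUD_PROOF_WINDOW inside the helper.
  const first = await findFirstUnfinalizedStateBatchIndex(oracle, fpw)
  return findEventForStateBatch(oracle, first)
}
```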
......@@ -9,21 +9,23 @@ import {
import { getChainId, sleep, toRpcHexString } from '@eth-optimism/core-utils'
import { CrossChainMessenger } from '@eth-optimism/sdk'
import { Provider } from '@ethersproject/abstract-provider'
import { Contract, ethers, Transaction } from 'ethers'
import { ethers, Transaction } from 'ethers'
import dateformat from 'dateformat'
import { version } from '../package.json'
import {
findFirstUnfinalizedStateBatchIndex,
findEventForStateBatch,
updateStateBatchEventCache,
PartialEvent,
OutputOracle,
updateOracleCache,
} from './helpers'
type Options = {
l1RpcProvider: Provider
l2RpcProvider: Provider
startBatchIndex: number
bedrock: boolean
}
type Metrics = {
......@@ -34,7 +36,7 @@ type Metrics = {
type State = {
fpw: number
scc: Contract
oo: OutputOracle<any>
messenger: CrossChainMessenger
highestCheckedBatchIndex: number
diverged: boolean
......@@ -65,6 +67,12 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
desc: 'Batch index to start checking from',
public: true,
},
bedrock: {
validator: validators.bool,
default: false,
desc: 'Whether or not the service is running against a Bedrock chain',
public: true,
},
},
metricsSpec: {
highestBatchIndex: {
......@@ -103,24 +111,42 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
l2SignerOrProvider: this.options.l2RpcProvider,
l1ChainId: await getChainId(this.options.l1RpcProvider),
l2ChainId: await getChainId(this.options.l2RpcProvider),
bedrock: this.options.bedrock,
})
// Not diverged by default.
this.state.diverged = false
// We use this a lot, so it's a bit cleaner to pull it out to the top level of the state object.
this.state.scc = this.state.messenger.contracts.l1.StateCommitmentChain
this.state.fpw = (await this.state.scc.FRAUD_PROOF_WINDOW()).toNumber()
this.state.fpw = await this.state.messenger.getChallengePeriodSeconds()
if (this.options.bedrock) {
const oo = this.state.messenger.contracts.l1.L2OutputOracle
this.state.oo = {
contract: oo,
filter: oo.filters.OutputProposed(),
getTotalElements: async () => oo.latestOutputIndex(),
getEventIndex: (args) => args.l2OutputIndex,
}
} else {
const oo = this.state.messenger.contracts.l1.StateCommitmentChain
this.state.oo = {
contract: oo,
filter: oo.filters.StateBatchAppended(),
getTotalElements: async () => oo.getTotalBatches(),
getEventIndex: (args) => args._batchIndex,
}
}
// Populate the event cache.
this.logger.info(`warming event cache, this might take a while...`)
await updateStateBatchEventCache(this.state.scc)
await updateOracleCache(this.state.oo)
// Figure out where to start syncing from.
if (this.options.startBatchIndex === -1) {
this.logger.info(`finding appropriate starting height`)
const firstUnfinalized = await findFirstUnfinalizedStateBatchIndex(
this.state.scc
this.state.oo,
this.state.fpw
)
// We may not have any unfinalized batches in the case where no batches have been submitted
......@@ -129,7 +155,7 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
if (firstUnfinalized === undefined) {
this.logger.info(`no unfinalized batches found, starting from latest`)
this.state.highestCheckedBatchIndex = (
await this.state.scc.getTotalBatches()
await this.state.oo.getTotalElements()
).toNumber()
} else {
this.state.highestCheckedBatchIndex = firstUnfinalized
......@@ -141,6 +167,14 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
this.logger.info(`starting height`, {
startBatchIndex: this.state.highestCheckedBatchIndex,
})
// Set the initial metrics.
this.metrics.highestBatchIndex.set(
{
type: 'checked',
},
this.state.highestCheckedBatchIndex
)
}
async routes(router: ExpressRouter): Promise<void> {
......@@ -154,7 +188,7 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
async main(): Promise<void> {
let latestBatchIndex: number
try {
latestBatchIndex = (await this.state.scc.getTotalBatches()).toNumber()
latestBatchIndex = (await this.state.oo.getTotalElements()).toNumber()
} catch (err) {
this.logger.error(`got error when connecting to node`, {
error: err,
......@@ -189,7 +223,7 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
let event: PartialEvent
try {
event = await findEventForStateBatch(
this.state.scc,
this.state.oo,
this.state.highestCheckedBatchIndex
)
} catch (err) {
......@@ -206,34 +240,6 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
return
}
let batchTransaction: Transaction
try {
batchTransaction = await this.options.l1RpcProvider.getTransaction(
event.transactionHash
)
} catch (err) {
this.logger.error(`got error when connecting to node`, {
error: err,
node: 'l1',
section: 'getTransaction',
})
this.metrics.nodeConnectionFailures.inc({
layer: 'l1',
section: 'getTransaction',
})
await sleep(15000)
return
}
const [stateRoots] = this.state.scc.interface.decodeFunctionData(
'appendStateBatch',
batchTransaction.data
)
const batchStart = event.args._prevTotalElements.toNumber() + 1
const batchSize = event.args._batchSize.toNumber()
const batchEnd = batchStart + batchSize
let latestBlock: number
try {
latestBlock = await this.options.l2RpcProvider.getBlockNumber()
......@@ -251,55 +257,80 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
return
}
if (latestBlock < batchEnd) {
this.logger.info(`node is behind, waiting for sync`, {
batchEnd,
latestBlock,
})
return
}
if (this.options.bedrock) {
if (latestBlock < event.args.l2BlockNumber.toNumber()) {
this.logger.info(`node is behind, waiting for sync`, {
batchEnd: event.args.l2BlockNumber.toNumber(),
latestBlock,
})
return
}
// `getBlockRange` has a limit of 1000 blocks, so we have to break this request out into
// multiple requests of maximum 1000 blocks in the case that batchSize > 1000.
let blocks: any[] = []
for (let i = 0; i < batchSize; i += 1000) {
let newBlocks: any[]
let targetBlock: any
try {
newBlocks = await (
targetBlock = await (
this.options.l2RpcProvider as ethers.providers.JsonRpcProvider
).send('eth_getBlockRange', [
toRpcHexString(batchStart + i),
toRpcHexString(batchStart + i + Math.min(batchSize - i, 1000) - 1),
).send('eth_getBlockByNumber', [
toRpcHexString(event.args.l2BlockNumber.toNumber()),
false,
])
} catch (err) {
this.logger.error(`got error when connecting to node`, {
error: err,
node: 'l2',
section: 'getBlockRange',
section: 'getBlock',
})
this.metrics.nodeConnectionFailures.inc({
layer: 'l2',
section: 'getBlockRange',
section: 'getBlock',
})
await sleep(15000)
return
}
blocks = blocks.concat(newBlocks)
}
let messagePasserProofResponse: any
try {
messagePasserProofResponse = await (
this.options.l2RpcProvider as ethers.providers.JsonRpcProvider
).send('eth_getProof', [
this.state.messenger.contracts.l2.BedrockMessagePasser.address,
[],
toRpcHexString(event.args.l2BlockNumber.toNumber()),
])
} catch (err) {
this.logger.error(`got error when connecting to node`, {
error: err,
node: 'l2',
section: 'getProof',
})
this.metrics.nodeConnectionFailures.inc({
layer: 'l2',
section: 'getProof',
})
await sleep(15000)
return
}
const outputRoot = ethers.utils.solidityKeccak256(
['uint256', 'bytes32', 'bytes32', 'bytes32'],
[
0,
targetBlock.stateRoot,
messagePasserProofResponse.storageHash,
targetBlock.hash,
]
)
for (const [i, stateRoot] of stateRoots.entries()) {
if (blocks[i].stateRoot !== stateRoot) {
if (outputRoot !== event.args.outputRoot) {
this.state.diverged = true
this.metrics.isCurrentlyMismatched.set(1)
this.logger.error(`state root mismatch`, {
blockNumber: blocks[i].number,
expectedStateRoot: blocks[i].stateRoot,
actualStateRoot: stateRoot,
blockNumber: targetBlock.number,
expectedStateRoot: event.args.outputRoot,
actualStateRoot: outputRoot,
finalizationTime: dateformat(
new Date(
(ethers.BigNumber.from(blocks[i].timestamp).toNumber() +
(ethers.BigNumber.from(targetBlock.timestamp).toNumber() +
this.state.fpw) *
1000
),
......@@ -308,8 +339,99 @@ export class FaultDetector extends BaseServiceV2<Options, Metrics, State> {
})
return
}
} else {
let batchTransaction: Transaction
try {
batchTransaction = await this.options.l1RpcProvider.getTransaction(
event.transactionHash
)
} catch (err) {
this.logger.error(`got error when connecting to node`, {
error: err,
node: 'l1',
section: 'getTransaction',
})
this.metrics.nodeConnectionFailures.inc({
layer: 'l1',
section: 'getTransaction',
})
await sleep(15000)
return
}
const [stateRoots] = this.state.oo.contract.interface.decodeFunctionData(
'appendStateBatch',
batchTransaction.data
)
const batchStart = event.args._prevTotalElements.toNumber() + 1
const batchSize = event.args._batchSize.toNumber()
const batchEnd = batchStart + batchSize
if (latestBlock < batchEnd) {
this.logger.info(`node is behind, waiting for sync`, {
batchEnd,
latestBlock,
})
return
}
// `getBlockRange` has a limit of 1000 blocks, so we have to break this request out into
// multiple requests of maximum 1000 blocks in the case that batchSize > 1000.
let blocks: any[] = []
for (let i = 0; i < batchSize; i += 1000) {
let newBlocks: any[]
try {
newBlocks = await (
this.options.l2RpcProvider as ethers.providers.JsonRpcProvider
).send('eth_getBlockRange', [
toRpcHexString(batchStart + i),
toRpcHexString(batchStart + i + Math.min(batchSize - i, 1000) - 1),
false,
])
} catch (err) {
this.logger.error(`got error when connecting to node`, {
error: err,
node: 'l2',
section: 'getBlockRange',
})
this.metrics.nodeConnectionFailures.inc({
layer: 'l2',
section: 'getBlockRange',
})
await sleep(15000)
return
}
blocks = blocks.concat(newBlocks)
}
for (const [i, stateRoot] of stateRoots.entries()) {
if (blocks[i].stateRoot !== stateRoot) {
this.state.diverged = true
this.metrics.isCurrentlyMismatched.set(1)
this.logger.error(`state root mismatch`, {
blockNumber: blocks[i].number,
expectedStateRoot: blocks[i].stateRoot,
actualStateRoot: stateRoot,
finalizationTime: dateformat(
new Date(
(ethers.BigNumber.from(blocks[i].timestamp).toNumber() +
this.state.fpw) *
1000
),
'mmmm dS, yyyy, h:MM:ss TT'
),
})
return
}
}
}
this.logger.info(`checked batch ok`, {
batchIndex: this.state.highestCheckedBatchIndex,
})
this.state.highestCheckedBatchIndex++
this.metrics.highestBatchIndex.set(
{
......
......@@ -12,6 +12,7 @@ import { expect } from './setup'
import {
findEventForStateBatch,
findFirstUnfinalizedStateBatchIndex,
OutputOracle,
} from '../src'
describe('helpers', () => {
......@@ -28,6 +29,7 @@ describe('helpers', () => {
let AddressManager: Contract
let ChainStorageContainer: Contract
let StateCommitmentChain: Contract
let oracle: OutputOracle<any>
beforeEach(async () => {
// Set up fakes
FakeBondManager = await smock.fake(getContractInterface('BondManager'))
......@@ -67,6 +69,13 @@ describe('helpers', () => {
// Set up mock returns
FakeCanonicalTransactionChain.getTotalElements.returns(1000000000) // just needs to be large
FakeBondManager.isCollateralized.returns(true)
oracle = {
contract: StateCommitmentChain,
filter: StateCommitmentChain.filters.StateBatchAppended(),
getTotalElements: async () => StateCommitmentChain.getTotalBatches(),
getEventIndex: (args: any) => args._batchIndex,
}
})
describe('findEventForStateBatch', () => {
......@@ -79,7 +88,7 @@ describe('helpers', () => {
})
it('should return the event', async () => {
const event = await findEventForStateBatch(StateCommitmentChain, 0)
const event = await findEventForStateBatch(oracle, 0)
expect(event.args._batchIndex).to.equal(0)
})
......@@ -88,7 +97,7 @@ describe('helpers', () => {
describe('when the event does not exist', () => {
it('should throw an error', async () => {
await expect(
findEventForStateBatch(StateCommitmentChain, 0)
findEventForStateBatch(oracle, 0)
).to.eventually.be.rejectedWith('unable to find event for batch')
})
})
......@@ -119,7 +128,8 @@ describe('helpers', () => {
it('should find the first batch older than the FPW', async () => {
const first = await findFirstUnfinalizedStateBatchIndex(
StateCommitmentChain
oracle,
challengeWindowSeconds
)
expect(first).to.equal(1)
......@@ -144,7 +154,8 @@ describe('helpers', () => {
it('should return zero', async () => {
const first = await findFirstUnfinalizedStateBatchIndex(
StateCommitmentChain
oracle,
challengeWindowSeconds
)
expect(first).to.equal(0)
......@@ -177,7 +188,8 @@ describe('helpers', () => {
it('should return undefined', async () => {
const first = await findFirstUnfinalizedStateBatchIndex(
StateCommitmentChain
oracle,
challengeWindowSeconds
)
expect(first).to.equal(undefined)
......
......@@ -12090,7 +12090,7 @@ level-ws@^2.0.0:
readable-stream "^3.1.0"
xtend "^4.0.1"
level@^6.0.1:
"level6@npm:level@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/level/-/level-6.0.1.tgz#dc34c5edb81846a6de5079eac15706334b0d7cd6"
integrity sha512-psRSqJZCsC/irNhfHzrVZbmPYXDcEYhA5TVNwr+V92jF44rbf86hqGp8fiT702FyiArScYIlPSBTDUASCVNSpw==
......