exchain / nebula, commit d64f5993 (unverified)
Authored Apr 13, 2023 by mergify[bot]; committed via GitHub on Apr 13, 2023

    Merge branch 'develop' into aj/fpp-pointers

Parents: c5d902ec, 7312bbf3

Showing 4 changed files with 109 additions and 14 deletions (+109 -14):
    op-e2e/actions/blocktime_test.go    +3  -3
    op-node/sources/batching.go         +24 -4
    op-node/sources/batching_test.go    +81 -7
    op-node/sources/receipts.go         +1  -0
op-e2e/actions/blocktime_test.go

@@ -28,12 +28,11 @@ func TestBatchInLastPossibleBlocks(gt *testing.T) {
 	signer := types.LatestSigner(sd.L2Cfg.Config)
 	cl := sequencerEngine.EthClient()
+	aliceNonce := uint64(0) // manual nonce management to avoid geth pending-tx nonce non-determinism flakiness
 	aliceTx := func() {
-		n, err := cl.PendingNonceAt(t.Ctx(), dp.Addresses.Alice)
-		require.NoError(t, err)
 		tx := types.MustSignNewTx(dp.Secrets.Alice, signer, &types.DynamicFeeTx{
 			ChainID:   sd.L2Cfg.Config.ChainID,
-			Nonce:     n,
+			Nonce:     aliceNonce,
 			GasTipCap: big.NewInt(2 * params.GWei),
 			GasFeeCap: new(big.Int).Add(miner.l1Chain.CurrentBlock().BaseFee, big.NewInt(2*params.GWei)),
 			Gas:       params.TxGas,
@@ -41,6 +40,7 @@ func TestBatchInLastPossibleBlocks(gt *testing.T) {
 			Value:     e2eutils.Ether(2),
 		})
 		require.NoError(gt, cl.SendTransaction(t.Ctx(), tx))
+		aliceNonce += 1
 	}
 	makeL2BlockWithAliceTx := func() {
 		aliceTx()
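The test now tracks Alice's nonce in a local counter instead of querying the node's pending nonce before every send. A minimal standalone sketch of the same pattern, assuming go-ethereum's types/params packages and a fresh (nonce-0) account; sendWithLocalNonce and the fee values are illustrative and not part of this change:

package example

import (
	"context"
	"crypto/ecdsa"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/ethclient"
	"github.com/ethereum/go-ethereum/params"
)

// sendWithLocalNonce returns a closure that signs and submits transactions,
// advancing a locally tracked nonce after each successful send instead of
// calling PendingNonceAt, which can be non-deterministic while txs are pending.
func sendWithLocalNonce(cl *ethclient.Client, key *ecdsa.PrivateKey, cfg *params.ChainConfig, to common.Address) func(ctx context.Context) error {
	signer := types.LatestSigner(cfg)
	nonce := uint64(0) // illustrative: assumes a fresh account
	return func(ctx context.Context) error {
		tx := types.MustSignNewTx(key, signer, &types.DynamicFeeTx{
			ChainID:   cfg.ChainID,
			Nonce:     nonce,
			GasTipCap: big.NewInt(2 * params.GWei),
			GasFeeCap: big.NewInt(100 * params.GWei), // illustrative fee cap
			Gas:       params.TxGas,
			To:        &to,
			Value:     big.NewInt(1),
		})
		if err := cl.SendTransaction(ctx, tx); err != nil {
			return err
		}
		nonce += 1 // only advance once the node has accepted the tx
		return nil
	}
}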
op-node/sources/batching.go

@@ -24,6 +24,7 @@ type IterativeBatchCall[K any, V any] struct {
 	makeRequest func(K) (V, rpc.BatchElem)
 	getBatch    BatchCallContextFn
+	getSingle   CallContextFn

 	requestsValues []V
 	scheduled      chan rpc.BatchElem
@@ -35,6 +36,7 @@ func NewIterativeBatchCall[K any, V any](
 	requestsKeys []K,
 	makeRequest func(K) (V, rpc.BatchElem),
 	getBatch BatchCallContextFn,
+	getSingle CallContextFn,
 	batchSize int) *IterativeBatchCall[K, V] {

 	if len(requestsKeys) < batchSize {
@@ -47,6 +49,7 @@ func NewIterativeBatchCall[K any, V any](
 	out := &IterativeBatchCall[K, V]{
 		completed:    0,
 		getBatch:     getBatch,
+		getSingle:    getSingle,
 		requestsKeys: requestsKeys,
 		batchSize:    batchSize,
 		makeRequest:  makeRequest,
@@ -84,6 +87,11 @@ func (ibc *IterativeBatchCall[K, V]) Fetch(ctx context.Context) error {
 	ibc.resetLock.RLock()
 	defer ibc.resetLock.RUnlock()

+	// return early if context is Done
+	if ctx.Err() != nil {
+		return ctx.Err()
+	}
+
 	// collect a batch from the requests channel
 	batch := make([]rpc.BatchElem, 0, ibc.batchSize)
 	// wait for first element
@@ -119,12 +127,24 @@ func (ibc *IterativeBatchCall[K, V]) Fetch(ctx context.Context) error {
 			break
 		}
 	}
+	if len(batch) == 0 {
+		return nil
+	}
+	if ibc.batchSize == 1 {
+		first := batch[0]
+		if err := ibc.getSingle(ctx, &first.Result, first.Method, first.Args...); err != nil {
+			ibc.scheduled <- first
+			return err
+		}
+	} else {
 		if err := ibc.getBatch(ctx, batch); err != nil {
 			for _, r := range batch {
 				ibc.scheduled <- r
 			}
 			return fmt.Errorf("failed batch-retrieval: %w", err)
 		}
+	}

 	var result error
 	for _, elem := range batch {
 		if elem.Error != nil {
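The batching.go change gives IterativeBatchCall a second RPC path: alongside the batched getBatch function it now stores a plain getSingle call, and Fetch uses it whenever batchSize is 1, so a lone request goes out as a plain call rather than a one-element batch. A standalone sketch of the same idea against go-ethereum's rpc.Client, not the IterativeBatchCall type itself; fetchReceipts and the eth_getTransactionReceipt method are chosen for illustration:

package example

import (
	"context"
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/rpc"
)

// fetchReceipts issues one receipt request per hash: as a plain call when there
// is only a single request, and as a batch call otherwise, mirroring the
// getSingle / getBatch split added to IterativeBatchCall in this diff.
func fetchReceipts(ctx context.Context, cl *rpc.Client, hashes []common.Hash) ([]*types.Receipt, error) {
	out := make([]*types.Receipt, len(hashes))
	if len(hashes) == 1 {
		// single element: use a plain call instead of a one-element batch
		if err := cl.CallContext(ctx, &out[0], "eth_getTransactionReceipt", hashes[0]); err != nil {
			return nil, err
		}
		return out, nil
	}
	batch := make([]rpc.BatchElem, len(hashes))
	for i, h := range hashes {
		batch[i] = rpc.BatchElem{
			Method: "eth_getTransactionReceipt",
			Args:   []interface{}{h},
			Result: &out[i],
		}
	}
	if err := cl.BatchCallContext(ctx, batch); err != nil {
		return nil, fmt.Errorf("failed batch-retrieval: %w", err)
	}
	for _, elem := range batch {
		if elem.Error != nil {
			return nil, elem.Error
		}
	}
	return out, nil
}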
op-node/sources/batching_test.go

@@ -35,6 +35,7 @@ type batchTestCase struct {
 	batchSize int

 	batchCalls  []batchCall
+	singleCalls []elemCall

 	mock.Mock
 }
@@ -53,7 +54,14 @@ func (tc *batchTestCase) GetBatch(ctx context.Context, b []rpc.BatchElem) error
 	if ctx.Err() != nil {
 		return ctx.Err()
 	}
-	return tc.Mock.MethodCalled("get", b).Get(0).([]error)[0]
+	return tc.Mock.MethodCalled("getBatch", b).Get(0).([]error)[0]
+}
+
+func (tc *batchTestCase) GetSingle(ctx context.Context, result any, method string, args ...any) error {
+	if ctx.Err() != nil {
+		return ctx.Err()
+	}
+	return tc.Mock.MethodCalled("getSingle", (*(result.(*interface{}))).(*string), method, args[0]).Get(0).([]error)[0]
 }

 var mockErr = errors.New("mockErr")
@@ -64,7 +72,7 @@ func (tc *batchTestCase) Run(t *testing.T) {
 		keys[i] = i
 	}
-	makeMock := func(bci int, bc batchCall) func(args mock.Arguments) {
+	makeBatchMock := func(bc batchCall) func(args mock.Arguments) {
 		return func(args mock.Arguments) {
 			batch := args[0].([]rpc.BatchElem)
 			for i, elem := range batch {
@@ -83,7 +91,7 @@ func (tc *batchTestCase) Run(t *testing.T) {
 		}
 	}
 	// mock all the results of the batch calls
-	for bci, bc := range tc.batchCalls {
+	for _, bc := range tc.batchCalls {
 		var batch []rpc.BatchElem
 		for _, elem := range bc.elems {
 			batch = append(batch, rpc.BatchElem{
@@ -94,10 +102,30 @@ func (tc *batchTestCase) Run(t *testing.T) {
 			})
 		}
 		if len(bc.elems) > 0 {
-			tc.On("get", batch).Once().Run(makeMock(bci, bc)).Return([]error{bc.rpcErr}) // wrap to preserve nil as type of error
+			tc.On("getBatch", batch).Once().Run(makeBatchMock(bc)).Return([]error{bc.rpcErr}) // wrap to preserve nil as type of error
 		}
 	}
+	makeSingleMock := func(ec elemCall) func(args mock.Arguments) {
+		return func(args mock.Arguments) {
+			result := args[0].(*string)
+			id := args[2].(int)
+			require.Equal(t, ec.id, id, "element should match expected element")
+			if ec.err {
+				*result = ""
+			} else {
+				*result = fmt.Sprintf("mock result id %d", id)
+			}
+		}
+	}
+	// mock the results of unbatched calls
+	for _, ec := range tc.singleCalls {
+		var ret error
+		if ec.err {
+			ret = mockErr
+		}
+		tc.On("getSingle", new(string), "testing_foobar", ec.id).Once().Run(makeSingleMock(ec)).Return([]error{ret})
+	}
-	iter := NewIterativeBatchCall[int, *string](keys, makeTestRequest, tc.GetBatch, tc.batchSize)
+	iter := NewIterativeBatchCall[int, *string](keys, makeTestRequest, tc.GetBatch, tc.GetSingle, tc.batchSize)
 	for i, bc := range tc.batchCalls {
 		ctx := context.Background()
 		if bc.makeCtx != nil {
@@ -116,6 +144,20 @@ func (tc *batchTestCase) Run(t *testing.T) {
 			}
 		}
 	}
+	for i, ec := range tc.singleCalls {
+		ctx := context.Background()
+		err := iter.Fetch(ctx)
+		if err == io.EOF {
+			require.Equal(t, i, len(tc.singleCalls)-1, "EOF only on last call")
+		} else {
+			require.False(t, iter.Complete())
+			if ec.err {
+				require.Error(t, err)
+			} else {
+				require.NoError(t, err)
+			}
+		}
+	}
 	require.True(t, iter.Complete(), "batch iter should be complete after the expected calls")
 	out, err := iter.Result()
 	require.NoError(t, err)
@@ -154,6 +196,37 @@ func TestFetchBatched(t *testing.T) {
 				},
 			},
 		},
+		{
+			name:      "single element",
+			items:     1,
+			batchSize: 4,
+			singleCalls: []elemCall{
+				{id: 0, err: false},
+			},
+		},
+		{
+			name:      "unbatched",
+			items:     4,
+			batchSize: 1,
+			singleCalls: []elemCall{
+				{id: 0, err: false},
+				{id: 1, err: false},
+				{id: 2, err: false},
+				{id: 3, err: false},
+			},
+		},
+		{
+			name:      "unbatched with retry",
+			items:     4,
+			batchSize: 1,
+			singleCalls: []elemCall{
+				{id: 0, err: false},
+				{id: 1, err: true},
+				{id: 2, err: false},
+				{id: 3, err: false},
+				{id: 1, err: false},
+			},
+		},
 		{
 			name:  "split",
 			items: 5,
@@ -240,7 +313,7 @@ func TestFetchBatched(t *testing.T) {
 		},
 		{
 			name:      "context timeout",
-			items:     1,
+			items:     2,
 			batchSize: 3,
 			batchCalls: []batchCall{
 				{
@@ -255,6 +328,7 @@ func TestFetchBatched(t *testing.T) {
 				{
 					elems: []elemCall{
 						{id: 0, err: false},
+						{id: 1, err: false},
 					},
 					err: "",
 				},
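The updated test mocks the new single-call path with the same testify pattern it already used for batches: Run writes the fake result through the pointer the caller passed in, and the return value is wrapped in a []error slice so a nil error keeps its type through Return/Get(0). A compact standalone sketch of that pattern, simplified to pass a *string directly; fakeClient and TestFakeSingle are illustrative names, not part of this diff:

package example

import (
	"context"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

// fakeClient mirrors the structure of batchTestCase: a testify mock whose
// GetSingle forwards to a recorded "getSingle" expectation.
type fakeClient struct {
	mock.Mock
}

func (f *fakeClient) GetSingle(ctx context.Context, result any, method string, args ...any) error {
	if ctx.Err() != nil {
		return ctx.Err()
	}
	// wrap the error in a slice so a nil error keeps its type through Return/Get
	return f.Mock.MethodCalled("getSingle", result, method, args[0]).Get(0).([]error)[0]
}

func TestFakeSingle(t *testing.T) {
	f := new(fakeClient)
	f.On("getSingle", new(string), "testing_foobar", 0).
		Once().
		Run(func(args mock.Arguments) {
			// write the fake result through the caller's pointer
			*(args[0].(*string)) = "mock result id 0"
		}).
		Return([]error{nil})

	var out string
	require.NoError(t, f.GetSingle(context.Background(), &out, "testing_foobar", 0))
	require.Equal(t, "mock result id 0", out)
	f.AssertExpectations(t)
}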
op-node/sources/receipts.go

@@ -373,6 +373,7 @@ func (job *receiptsFetchingJob) runFetcher(ctx context.Context) error {
 		job.txHashes,
 		makeReceiptRequest,
 		job.client.BatchCallContext,
+		job.client.CallContext,
 		job.maxBatchSize,
 	)
 }
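receipts.go only has to pass the client's plain CallContext alongside BatchCallContext so the receipt fetcher can take the single-call path. For context, callers drive the returned IterativeBatchCall the same way the test above does: Fetch until io.EOF, then Result. A sketch of that calling pattern, where batchFetcher and drainFetcher are hypothetical shorthand for the Fetch/Complete/Result methods visible in this diff:

package example

import (
	"context"
	"errors"
	"io"
)

// batchFetcher is a hypothetical stand-in for the IterativeBatchCall methods
// used in this diff: Fetch returns io.EOF once all requests have completed,
// Complete reports whether everything has been fetched, and Result returns the values.
type batchFetcher[V any] interface {
	Fetch(ctx context.Context) error
	Complete() bool
	Result() ([]V, error)
}

// drainFetcher repeatedly calls Fetch until the fetcher signals io.EOF, then
// collects the results. Failed elements are re-scheduled by Fetch itself
// (as shown in batching.go above), so plain errors are simply retried here.
func drainFetcher[V any](ctx context.Context, f batchFetcher[V]) ([]V, error) {
	for !f.Complete() {
		err := f.Fetch(ctx)
		if errors.Is(err, io.EOF) {
			break // every scheduled request has been handled
		}
		if err != nil && ctx.Err() != nil {
			return nil, err // stop retrying once the context is done
		}
	}
	return f.Result()
}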