Commit 70af5045 authored by Michael de Hoog

Add support for unbatched RPC calls when batchSize == 1

parent 8e355d1d
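The iterator gains a second callback, getSingle, next to the existing getBatch one: when batchSize == 1 it issues a plain JSON-RPC call for the single scheduled element instead of wrapping it in a one-element batch. As a minimal sketch (not shown in this diff), the two callback types are assumed to mirror the CallContext and BatchCallContext methods of go-ethereum's *rpc.Client:

// Assumed definitions, matching the corresponding methods on go-ethereum's *rpc.Client.
type CallContextFn func(ctx context.Context, result any, method string, args ...any) error
type BatchCallContextFn func(ctx context.Context, b []rpc.BatchElem) error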
@@ -24,6 +24,7 @@ type IterativeBatchCall[K any, V any] struct {
 	makeRequest func(K) (V, rpc.BatchElem)
 	getBatch    BatchCallContextFn
+	getSingle   CallContextFn
 
 	requestsValues []V
 	scheduled      chan rpc.BatchElem
@@ -35,6 +36,7 @@ func NewIterativeBatchCall[K any, V any](
 	requestsKeys []K,
 	makeRequest func(K) (V, rpc.BatchElem),
 	getBatch BatchCallContextFn,
+	getSingle CallContextFn,
 	batchSize int) *IterativeBatchCall[K, V] {
 
 	if len(requestsKeys) < batchSize {
@@ -47,6 +49,7 @@ func NewIterativeBatchCall[K any, V any](
 	out := &IterativeBatchCall[K, V]{
 		completed:    0,
 		getBatch:     getBatch,
+		getSingle:    getSingle,
 		requestsKeys: requestsKeys,
 		batchSize:    batchSize,
 		makeRequest:  makeRequest,
@@ -119,11 +122,23 @@ func (ibc *IterativeBatchCall[K, V]) Fetch(ctx context.Context) error {
 			break
 		}
 
-		if err := ibc.getBatch(ctx, batch); err != nil {
-			for _, r := range batch {
-				ibc.scheduled <- r
-			}
-			return fmt.Errorf("failed batch-retrieval: %w", err)
+		if len(batch) == 0 {
+			return nil
+		}
+
+		if ibc.batchSize == 1 {
+			first := batch[0]
+			if err := ibc.getSingle(ctx, &first.Result, first.Method, first.Args...); err != nil {
+				ibc.scheduled <- first
+				return err
+			}
+		} else {
+			if err := ibc.getBatch(ctx, batch); err != nil {
+				for _, r := range batch {
+					ibc.scheduled <- r
+				}
+				return fmt.Errorf("failed batch-retrieval: %w", err)
+			}
 		}
 		var result error
 		for _, elem := range batch {
......
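For illustration, a hypothetical wiring sketch of the new constructor signature, assuming the code sits in the same package as IterativeBatchCall and uses go-ethereum's rpc.Client; the function name, the eth_getTransactionReceipt request, and the assumption that Result() returns ([]V, error) are illustrative, not part of this change:

// Hypothetical usage sketch (not part of the diff).
package sources

import (
	"context"
	"io"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
	"github.com/ethereum/go-ethereum/rpc"
)

func fetchReceiptsExample(ctx context.Context, cl *rpc.Client, txHashes []common.Hash, maxBatchSize int) ([]*types.Receipt, error) {
	makeReq := func(h common.Hash) (*types.Receipt, rpc.BatchElem) {
		out := new(types.Receipt)
		return out, rpc.BatchElem{Method: "eth_getTransactionReceipt", Args: []any{h}, Result: out}
	}
	// With maxBatchSize == 1 the iterator now calls cl.CallContext once per element
	// instead of sending a one-element batch via cl.BatchCallContext.
	call := NewIterativeBatchCall[common.Hash, *types.Receipt](txHashes, makeReq, cl.BatchCallContext, cl.CallContext, maxBatchSize)
	for {
		err := call.Fetch(ctx)
		if err == io.EOF { // all requests completed
			break
		}
		if err != nil {
			return nil, err // failed elements are rescheduled; a caller could retry Fetch instead
		}
	}
	return call.Result()
}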
@@ -34,7 +34,8 @@ type batchTestCase struct {
 	batchSize int
 
 	batchCalls  []batchCall
+	singleCalls []elemCall
 
 	mock.Mock
 }
@@ -53,7 +54,14 @@ func (tc *batchTestCase) GetBatch(ctx context.Context, b []rpc.BatchElem) error
 	if ctx.Err() != nil {
 		return ctx.Err()
 	}
-	return tc.Mock.MethodCalled("get", b).Get(0).([]error)[0]
+	return tc.Mock.MethodCalled("getBatch", b).Get(0).([]error)[0]
+}
+
+func (tc *batchTestCase) GetSingle(ctx context.Context, result any, method string, args ...any) error {
+	if ctx.Err() != nil {
+		return ctx.Err()
+	}
+	return tc.Mock.MethodCalled("getSingle", (*(result.(*interface{}))).(*string), method, args[0]).Get(0).([]error)[0]
 }
 
 var mockErr = errors.New("mockErr")
@@ -64,7 +72,7 @@ func (tc *batchTestCase) Run(t *testing.T) {
 		keys[i] = i
 	}
 
-	makeMock := func(bci int, bc batchCall) func(args mock.Arguments) {
+	makeBatchMock := func(bc batchCall) func(args mock.Arguments) {
 		return func(args mock.Arguments) {
 			batch := args[0].([]rpc.BatchElem)
 			for i, elem := range batch {
@@ -83,7 +91,7 @@ func (tc *batchTestCase) Run(t *testing.T) {
 		}
 	}
 	// mock all the results of the batch calls
-	for bci, bc := range tc.batchCalls {
+	for _, bc := range tc.batchCalls {
 		var batch []rpc.BatchElem
 		for _, elem := range bc.elems {
 			batch = append(batch, rpc.BatchElem{
@@ -94,10 +102,30 @@ func (tc *batchTestCase) Run(t *testing.T) {
 			})
 		}
 		if len(bc.elems) > 0 {
-			tc.On("get", batch).Once().Run(makeMock(bci, bc)).Return([]error{bc.rpcErr}) // wrap to preserve nil as type of error
+			tc.On("getBatch", batch).Once().Run(makeBatchMock(bc)).Return([]error{bc.rpcErr}) // wrap to preserve nil as type of error
 		}
 	}
+	makeSingleMock := func(ec elemCall) func(args mock.Arguments) {
+		return func(args mock.Arguments) {
+			result := args[0].(*string)
+			id := args[2].(int)
+			require.Equal(t, ec.id, id, "element should match expected element")
+			if ec.err {
+				*result = ""
+			} else {
+				*result = fmt.Sprintf("mock result id %d", id)
+			}
+		}
+	}
+	// mock the results of unbatched calls
+	for _, ec := range tc.singleCalls {
+		var ret error
+		if ec.err {
+			ret = mockErr
+		}
+		tc.On("getSingle", new(string), "testing_foobar", ec.id).Once().Run(makeSingleMock(ec)).Return([]error{ret})
+	}
 
-	iter := NewIterativeBatchCall[int, *string](keys, makeTestRequest, tc.GetBatch, tc.batchSize)
+	iter := NewIterativeBatchCall[int, *string](keys, makeTestRequest, tc.GetBatch, tc.GetSingle, tc.batchSize)
 	for i, bc := range tc.batchCalls {
 		ctx := context.Background()
 		if bc.makeCtx != nil {
@@ -116,6 +144,20 @@ func (tc *batchTestCase) Run(t *testing.T) {
 			}
 		}
 	}
+	for i, ec := range tc.singleCalls {
+		ctx := context.Background()
+		err := iter.Fetch(ctx)
+		if err == io.EOF {
+			require.Equal(t, i, len(tc.singleCalls)-1, "EOF only on last call")
+		} else {
+			require.False(t, iter.Complete())
+			if ec.err {
+				require.Error(t, err)
+			} else {
+				require.NoError(t, err)
+			}
+		}
+	}
 	require.True(t, iter.Complete(), "batch iter should be complete after the expected calls")
 	out, err := iter.Result()
 	require.NoError(t, err)
@@ -154,6 +196,37 @@ func TestFetchBatched(t *testing.T) {
 				},
 			},
 		},
+		{
+			name: "single element",
+			items: 1,
+			batchSize: 4,
+			singleCalls: []elemCall{
+				{id: 0, err: false},
+			},
+		},
+		{
+			name: "unbatched",
+			items: 4,
+			batchSize: 1,
+			singleCalls: []elemCall{
+				{id: 0, err: false},
+				{id: 1, err: false},
+				{id: 2, err: false},
+				{id: 3, err: false},
+			},
+		},
+		{
+			name: "unbatched with retry",
+			items: 4,
+			batchSize: 1,
+			singleCalls: []elemCall{
+				{id: 0, err: false},
+				{id: 1, err: true},
+				{id: 2, err: false},
+				{id: 3, err: false},
+				{id: 1, err: false},
+			},
+		},
 		{
 			name: "split",
 			items: 5,
@@ -240,7 +313,7 @@ func TestFetchBatched(t *testing.T) {
 		},
 		{
 			name: "context timeout",
-			items: 1,
+			items: 2,
 			batchSize: 3,
 			batchCalls: []batchCall{
 				{
@@ -255,6 +328,7 @@ func TestFetchBatched(t *testing.T) {
 				{
 					elems: []elemCall{
 						{id: 0, err: false},
+						{id: 1, err: false},
 					},
 					err: "",
 				},
......
@@ -373,6 +373,7 @@ func (job *receiptsFetchingJob) runFetcher(ctx context.Context) error {
 		job.txHashes,
 		makeReceiptRequest,
 		job.client.BatchCallContext,
+		job.client.CallContext,
 		job.maxBatchSize,
 	)
 }
......