Commit fc799dfb authored by mergify[bot], committed by GitHub

Merge branch 'develop' into dependabot/npm_and_yarn/typedoc-0.25.1

parents 8fe4fc8f 85a7a62f
......@@ -22,7 +22,8 @@ import (
)
const (
proofsDir      = "proofs"
diskStateCache = "state.json.gz"
)
type proofData struct {
......@@ -142,6 +143,20 @@ func (p *CannonTraceProvider) loadProof(ctx context.Context, i uint64) (*proofDa
// If the requested index is after the last step in the actual trace, extend the final no-op step
return p.lastProof, nil
}
// Attempt to read the last step from disk cache
if p.lastProof == nil && p.lastStep == 0 {
step, err := ReadLastStep(p.dir)
if err != nil {
p.logger.Warn("Failed to read last step from disk cache", "err", err)
} else {
p.lastStep = step
// If the requested step is beyond the tracked last step, clamp it
// to the last step so the final proof is read from disk.
if i > p.lastStep {
i = step
}
}
}
path := filepath.Join(p.dir, proofsDir, fmt.Sprintf("%d.json.gz", i))
file, err := ioutil.OpenDecompressed(path)
if errors.Is(err, os.ErrNotExist) {
......@@ -168,6 +183,9 @@ func (p *CannonTraceProvider) loadProof(ctx context.Context, i uint64) (*proofDa
if err != nil {
return nil, fmt.Errorf("cannot hash witness: %w", err)
}
if err := WriteLastStep(p.dir, state.Step); err != nil {
p.logger.Warn("Failed to write last step to disk cache", "step", p.lastStep)
}
proof := &proofData{
ClaimValue: witnessHash,
StateData: hexutil.Bytes(witness),
......@@ -194,3 +212,28 @@ func (p *CannonTraceProvider) loadProof(ctx context.Context, i uint64) (*proofDa
}
return &proof, nil
}
type diskStateCacheObj struct {
Step uint64 `json:"step"`
}
// ReadLastStep reads the tracked last step from disk.
func ReadLastStep(dir string) (uint64, error) {
state := diskStateCacheObj{}
file, err := ioutil.OpenDecompressed(filepath.Join(dir, diskStateCache))
if err != nil {
return 0, err
}
defer file.Close()
err = json.NewDecoder(file).Decode(&state)
if err != nil {
return 0, err
}
return state.Step, nil
}
// WriteLastStep writes the last step to disk as a persistent cache.
func WriteLastStep(dir string, step uint64) error {
state := diskStateCacheObj{Step: step}
return ioutil.WriteCompressedJson(filepath.Join(dir, diskStateCache), state)
}
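For orientation, a minimal usage sketch of the two cache helpers above (illustrative only, not part of this diff; the wrapper function name and error handling are assumptions):
// exampleLastStepCache (hypothetical) shows the intended round trip: persist
// the final step once the full trace is known, then recover it on a later run
// without re-executing cannon. A read error simply means the cache is cold.
func exampleLastStepCache(dir string, finalStep uint64) (uint64, error) {
	if err := WriteLastStep(dir, finalStep); err != nil {
		return 0, fmt.Errorf("persist last step: %w", err)
	}
	// Reads and decodes state.json.gz from dir.
	return ReadLastStep(dir)
}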
......@@ -65,8 +65,8 @@ func TestGet(t *testing.T) {
}
func TestGetStepData(t *testing.T) {
t.Run("ExistingProof", func(t *testing.T) {
dataDir, prestate := setupTestData(t)
provider, generator := setupWithTestData(t, dataDir, prestate)
value, proof, data, err := provider.GetStepData(context.Background(), 0)
require.NoError(t, err)
......@@ -80,6 +80,7 @@ func TestGetStepData(t *testing.T) {
})
t.Run("GenerateProof", func(t *testing.T) {
dataDir, prestate := setupTestData(t)
provider, generator := setupWithTestData(t, dataDir, prestate)
generator.finalState = &mipsevm.State{
Memory: &mipsevm.Memory{},
......@@ -105,6 +106,7 @@ func TestGetStepData(t *testing.T) {
})
t.Run("ProofAfterEndOfTrace", func(t *testing.T) {
dataDir, prestate := setupTestData(t)
provider, generator := setupWithTestData(t, dataDir, prestate)
generator.finalState = &mipsevm.State{
Memory: &mipsevm.Memory{},
......@@ -129,7 +131,52 @@ func TestGetStepData(t *testing.T) {
require.Nil(t, data)
})
t.Run("ReadLastStepFromDisk", func(t *testing.T) {
dataDir, prestate := setupTestData(t)
provider, initGenerator := setupWithTestData(t, dataDir, prestate)
initGenerator.finalState = &mipsevm.State{
Memory: &mipsevm.Memory{},
Step: 10,
Exited: true,
}
initGenerator.proof = &proofData{
ClaimValue: common.Hash{0xaa},
StateData: []byte{0xbb},
ProofData: []byte{0xcc},
OracleKey: common.Hash{0xdd}.Bytes(),
OracleValue: []byte{0xdd},
OracleOffset: 10,
}
_, _, _, err := provider.GetStepData(context.Background(), 7000)
require.NoError(t, err)
require.Contains(t, initGenerator.generated, 7000, "should have tried to generate the proof")
provider, generator := setupWithTestData(t, dataDir, prestate)
generator.finalState = &mipsevm.State{
Memory: &mipsevm.Memory{},
Step: 10,
Exited: true,
}
generator.proof = &proofData{
ClaimValue: common.Hash{0xaa},
StateData: []byte{0xbb},
ProofData: []byte{0xcc},
OracleKey: common.Hash{0xdd}.Bytes(),
OracleValue: []byte{0xdd},
OracleOffset: 10,
}
preimage, proof, data, err := provider.GetStepData(context.Background(), 7000)
require.NoError(t, err)
require.Contains(t, generator.generated, 10, "should have tried to generate the proof")
witness := generator.finalState.EncodeWitness()
require.EqualValues(t, witness, preimage)
require.Equal(t, []byte{}, proof)
require.Nil(t, data)
})
t.Run("MissingStateData", func(t *testing.T) {
dataDir, prestate := setupTestData(t)
provider, generator := setupWithTestData(t, dataDir, prestate)
_, _, _, err := provider.GetStepData(context.Background(), 1)
require.ErrorContains(t, err, "missing state data")
......@@ -137,6 +184,7 @@ func TestGetStepData(t *testing.T) {
})
t.Run("IgnoreUnknownFields", func(t *testing.T) {
dataDir, prestate := setupTestData(t)
provider, generator := setupWithTestData(t, dataDir, prestate)
value, proof, data, err := provider.GetStepData(context.Background(), 2)
require.NoError(t, err)
......
......@@ -2,6 +2,7 @@ package ioutil
import (
"compress/gzip"
"encoding/json"
"fmt"
"io"
"os"
......@@ -38,6 +39,20 @@ func OpenCompressed(file string, flag int, perm os.FileMode) (io.WriteCloser, er
return out, nil
}
// WriteCompressedJson writes the object to the specified file as a gzip-compressed JSON object.
// The file name must have a .gz extension, otherwise an error is returned.
func WriteCompressedJson(file string, obj any) error {
if !IsGzip(file) {
return fmt.Errorf("file %v does not have .gz extension", file)
}
out, err := OpenCompressed(file, os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
return err
}
defer out.Close()
return json.NewEncoder(out).Encode(obj)
}
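A hedged round-trip sketch for the helper above (illustrative only, not part of this diff; the function name, payload, and use of path/filepath are assumptions):
// exampleCompressedJSONRoundTrip (hypothetical) writes a small object as
// gzipped JSON and reads it back through OpenDecompressed. The .gz extension
// is required; WriteCompressedJson returns an error for any other path.
func exampleCompressedJSONRoundTrip(dir string) (map[string]int, error) {
	path := filepath.Join(dir, "example.json.gz")
	if err := WriteCompressedJson(path, map[string]int{"step": 42}); err != nil {
		return nil, err
	}
	in, err := OpenDecompressed(path)
	if err != nil {
		return nil, err
	}
	defer in.Close()
	out := map[string]int{}
	err = json.NewDecoder(in).Decode(&out)
	return out, err
}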
// IsGzip determines if a path points to a gzip compressed file.
// Returns true when the file has a .gz extension.
func IsGzip(path string) bool {
......
package ioutil
import (
"encoding/json"
"io"
"os"
"path/filepath"
......@@ -47,3 +48,43 @@ func TestReadWriteWithOptionalCompression(t *testing.T) {
})
}
}
func TestWriteReadCompressedJson(t *testing.T) {
tests := []struct {
name string
filename string
err string
}{
{"Uncompressed", "test.notgz", "does not have .gz extension"},
{"Gzipped", "test.gz", ""},
}
for _, test := range tests {
test := test
t.Run(test.name, func(t *testing.T) {
dir := t.TempDir()
path := filepath.Join(dir, test.filename)
err := WriteCompressedJson(path, struct {
A int
B string
}{A: 1, B: "test"})
if test.err != "" {
require.ErrorContains(t, err, test.err)
return
}
require.NoError(t, err)
var read struct {
A int
B string
}
in, err := OpenDecompressed(path)
require.NoError(t, err)
err = json.NewDecoder(in).Decode(&read)
require.NoError(t, err)
require.Equal(t, struct {
A int
B string
}{A: 1, B: "test"}, read)
})
}
}
......@@ -50,7 +50,7 @@ importers:
version: 1.4.3
doctoc:
specifier: ^2.2.0
version: 2.2.0
version: 2.2.1
eslint:
specifier: ^8.43.0
version: 8.47.0
......@@ -5394,10 +5394,10 @@ packages:
json-schema-traverse: 0.4.1
uri-js: 4.4.1
/anchor-markdown-header@0.5.7:
resolution: {integrity: sha512-AmikqcK15r3q99hPvTa1na9n3eLkW0uE+RL9BZMSgwYalQeDnNXbYrN06BIcBPfGlmsGIE2jvkuvl/x0hyPF5Q==}
/anchor-markdown-header@0.6.0:
resolution: {integrity: sha512-v7HJMtE1X7wTpNFseRhxsY/pivP4uAJbidVhPT+yhz4i/vV1+qx371IXuV9V7bN6KjFtheLJxqaSm0Y/8neJTA==}
dependencies:
emoji-regex: 6.1.3
emoji-regex: 10.1.0
dev: true
/ansi-colors@4.1.1:
......@@ -6921,14 +6921,14 @@ packages:
dependencies:
path-type: 4.0.0
/doctoc@2.2.0:
resolution: {integrity: sha512-PtiyaS+S3kcMbpx6x2V0S+PeDKisxmjEFnZsuYkkj4Lh3ObozJuuYh9dM4+sX02Ouuty8RF2LOCnIbpu/hWy/A==}
/doctoc@2.2.1:
resolution: {integrity: sha512-qNJ1gsuo7hH40vlXTVVrADm6pdg30bns/Mo7Nv1SxuXSM1bwF9b4xQ40a6EFT/L1cI+Yylbyi8MPI4G4y7XJzQ==}
hasBin: true
dependencies:
'@textlint/markdown-to-ast': 12.2.1
anchor-markdown-header: 0.5.7
anchor-markdown-header: 0.6.0
htmlparser2: 7.2.0
minimist: 1.2.6
minimist: 1.2.8
underscore: 1.13.4
update-section: 0.3.3
transitivePeerDependencies:
......@@ -7068,8 +7068,8 @@ packages:
engines: {node: '>=12'}
dev: true
/emoji-regex@6.1.3:
resolution: {integrity: sha512-73/zxHTjP2N2FQf0J5ngNjxP9LqG2krUshxYaowI8HxZQsiL2pYJc3k9/O93fc5/lCSkZv+bQ5Esk6k6msiSvg==}
/emoji-regex@10.1.0:
resolution: {integrity: sha512-xAEnNCT3w2Tg6MA7ly6QqYJvEoY1tm9iIjJ3yMKK9JPlWuRHAMoe5iETwQnx3M9TVbFMfsrBgWKR+IsmswwNjg==}
dev: true
/emoji-regex@8.0.0:
......@@ -11051,10 +11051,6 @@ packages:
kind-of: 6.0.3
dev: false
/minimist@1.2.6:
resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==}
dev: true
/minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
......