vicotor / mybee · Commits

Unverified commit f56d2edb, authored Feb 20, 2021 by Rodrigo Q. Saramago, committed by GitHub on Feb 20, 2021:
Cac refactor (#1281)
parent 6b4b8e06

Showing 11 changed files with 177 additions and 144 deletions (+177 -144):
pkg/api/chunk.go                   +7  -18
pkg/api/soc.go                     +3  -20
pkg/cac/cac.go                     +56 -9
pkg/cac/cac_test.go                +85 -5
pkg/cac/export_test.go             +10 -0
pkg/feeds/putter.go                +2  -16
pkg/file/splitter/internal/job.go  +4  -20
pkg/soc/export_test.go             +2  -3
pkg/soc/soc.go                     +2  -25
pkg/soc/soc_test.go                +3  -8
pkg/storage/testing/chunk.go       +3  -20
pkg/api/chunk.go

@@ -12,7 +12,7 @@ import (
     "io/ioutil"
     "net/http"

-    "github.com/ethersphere/bee/pkg/bmtpool"
+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/jsonhttp"
     "github.com/ethersphere/bee/pkg/netstore"
@@ -75,28 +75,17 @@ func (s *server) chunkUploadHandler(w http.ResponseWriter, r *http.Request) {
         return
     }

-    hasher := bmtpool.Get()
-    defer bmtpool.Put(hasher)
-
-    err = hasher.SetSpanBytes(data[:swarm.SpanSize])
-    if err != nil {
-        s.logger.Debugf("chunk upload: set span: %v", err)
-        s.logger.Error("chunk upload: span error")
-        jsonhttp.InternalServerError(w, "span error")
-        return
-    }
-    _, err = hasher.Write(data[swarm.SpanSize:])
+    chunk, err := cac.NewWithDataSpan(data)
     if err != nil {
+        s.logger.Debugf("chunk upload: create chunk error: %v", err)
+        s.logger.Error("chunk upload: create chunk error")
+        jsonhttp.InternalServerError(w, "create chunk error")
         return
     }

-    address := swarm.NewAddress(hasher.Sum(nil))
-    chunk := swarm.NewChunk(address, data)
-
     seen, err := s.storer.Put(ctx, requestModePut(r), chunk)
     if err != nil {
-        s.logger.Debugf("chunk upload: chunk write error: %v, addr %s", err, address)
+        s.logger.Debugf("chunk upload: chunk write error: %v, addr %s", err, chunk.Address())
         s.logger.Error("chunk upload: chunk write error")
         jsonhttp.BadRequest(w, "chunk write error")
         return
@@ -123,7 +112,7 @@ func (s *server) chunkUploadHandler(w http.ResponseWriter, r *http.Request) {
     }

     w.Header().Set("Access-Control-Expose-Headers", SwarmTagHeader)
-    jsonhttp.OK(w, chunkAddressResponse{Reference: address})
+    jsonhttp.OK(w, chunkAddressResponse{Reference: chunk.Address()})
 }

 func (s *server) chunkGetHandler(w http.ResponseWriter, r *http.Request) {
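Aside (not part of the commit): the handler now passes the raw upload body straight to cac.NewWithDataSpan, which expects an 8-byte little-endian span followed by the payload. A minimal sketch of that wire format, using only identifiers that appear in this diff; the sample payload and program scaffolding are illustrative assumptions.

package main

import (
    "encoding/binary"
    "fmt"

    "github.com/ethersphere/bee/pkg/cac"
    "github.com/ethersphere/bee/pkg/swarm"
)

func main() {
    payload := []byte("hello swarm") // hypothetical payload

    // Wire format expected by NewWithDataSpan: an 8-byte little-endian span
    // (the payload length) followed by the payload itself.
    data := make([]byte, swarm.SpanSize+len(payload))
    binary.LittleEndian.PutUint64(data[:swarm.SpanSize], uint64(len(payload)))
    copy(data[swarm.SpanSize:], payload)

    ch, err := cac.NewWithDataSpan(data)
    if err != nil {
        panic(err)
    }
    fmt.Println(ch.Address()) // BMT address computed over span + payload
}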
pkg/api/soc.go

@@ -10,16 +10,14 @@ import (
     "io/ioutil"
     "net/http"

-    "github.com/ethersphere/bee/pkg/bmtpool"
+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/jsonhttp"
     "github.com/ethersphere/bee/pkg/soc"
     "github.com/ethersphere/bee/pkg/swarm"
     "github.com/gorilla/mux"
 )

-var (
-    errBadRequestParams = errors.New("owner, id or span is not well formed")
-)
+var errBadRequestParams = errors.New("owner, id or span is not well formed")

 type socPostResponse struct {
     Reference swarm.Address `json:"reference"`
@@ -82,7 +80,7 @@ func (s *server) socUploadHandler(w http.ResponseWriter, r *http.Request) {
         return
     }

-    ch, err := chunk(data)
+    ch, err := cac.NewWithDataSpan(data)
     if err != nil {
         s.logger.Debugf("soc upload: create content addressed chunk: %v", err)
         s.logger.Error("soc upload: chunk data error")
@@ -117,18 +115,3 @@ func (s *server) socUploadHandler(w http.ResponseWriter, r *http.Request) {

     jsonhttp.Created(w, chunkAddressResponse{Reference: chunk.Address()})
 }
-
-func chunk(data []byte) (swarm.Chunk, error) {
-    hasher := bmtpool.Get()
-    defer bmtpool.Put(hasher)
-
-    err := hasher.SetSpanBytes(data[:swarm.SpanSize])
-    if err != nil {
-        return nil, err
-    }
-    _, err = hasher.Write(data[swarm.SpanSize:])
-    if err != nil {
-        return nil, err
-    }
-    return swarm.NewChunk(swarm.NewAddress(hasher.Sum(nil)), data), nil
-}
pkg/cac/cac.go

@@ -2,25 +2,72 @@ package cac

 import (
     "encoding/binary"
+    "errors"

     "github.com/ethersphere/bee/pkg/bmtpool"
     "github.com/ethersphere/bee/pkg/swarm"
 )

+var (
+    errTooShortChunkData = errors.New("short chunk data")
+    errTooLargeChunkData = errors.New("data too large")
+)
+
+// New creates a new content address chunk by initializing a span and appending the data to it.
 func New(data []byte) (swarm.Chunk, error) {
-    hasher := bmtpool.Get()
-    defer bmtpool.Put(hasher)
-
-    _, err := hasher.Write(data)
-    if err != nil {
-        return nil, err
-    }
-    span := make([]byte, 8)
-    binary.LittleEndian.PutUint64(span, uint64(len(data)))
-    err = hasher.SetSpanBytes(span)
-    if err != nil {
-        return nil, err
-    }
-    return swarm.NewChunk(swarm.NewAddress(hasher.Sum(nil)), append(span, data...)), nil
+    dataLength := len(data)
+    if dataLength > swarm.ChunkSize {
+        return nil, errTooLargeChunkData
+    }
+    if dataLength == 0 {
+        return nil, errTooShortChunkData
+    }
+
+    span := make([]byte, swarm.SpanSize)
+    binary.LittleEndian.PutUint64(span, uint64(dataLength))
+    return newWithSpan(data, span)
+}
+
+// NewWithDataSpan creates a new chunk assuming that the span precedes the actual data.
+func NewWithDataSpan(data []byte) (swarm.Chunk, error) {
+    dataLength := len(data)
+    if dataLength > swarm.ChunkSize+swarm.SpanSize {
+        return nil, errTooLargeChunkData
+    }
+    if dataLength < swarm.SpanSize {
+        return nil, errTooShortChunkData
+    }
+    return newWithSpan(data[swarm.SpanSize:], data[:swarm.SpanSize])
+}
+
+// newWithSpan creates a new chunk prepending the given span to the data.
+func newWithSpan(data, span []byte) (swarm.Chunk, error) {
+    h := hasher(data)
+    hash, err := h(span)
+    if err != nil {
+        return nil, err
+    }
+
+    cdata := make([]byte, len(data)+len(span))
+    copy(cdata[:swarm.SpanSize], span)
+    copy(cdata[swarm.SpanSize:], data)
+
+    return swarm.NewChunk(swarm.NewAddress(hash), cdata), nil
+}
+
+// hasher is a helper function to hash a given data based on the given span.
+func hasher(data []byte) func([]byte) ([]byte, error) {
+    return func(span []byte) ([]byte, error) {
+        hasher := bmtpool.Get()
+        defer bmtpool.Put(hasher)
+
+        if err := hasher.SetSpanBytes(span); err != nil {
+            return nil, err
+        }
+        if _, err := hasher.Write(data); err != nil {
+            return nil, err
+        }
+        return hasher.Sum(nil), nil
+    }
 }
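Aside (not part of the commit): a hedged sanity-check sketch of the two constructors above. cac.New computes and prepends the span itself, while cac.NewWithDataSpan takes data that already carries the span, so both should yield the same address for the same payload. The main function and sample payload are assumptions for illustration.

package main

import (
    "encoding/binary"
    "fmt"

    "github.com/ethersphere/bee/pkg/cac"
    "github.com/ethersphere/bee/pkg/swarm"
)

func main() {
    payload := []byte("greaterthanspan")

    // cac.New computes and prepends the span for us.
    c1, err := cac.New(payload)
    if err != nil {
        panic(err)
    }

    // cac.NewWithDataSpan expects span || payload.
    withSpan := make([]byte, swarm.SpanSize+len(payload))
    binary.LittleEndian.PutUint64(withSpan, uint64(len(payload)))
    copy(withSpan[swarm.SpanSize:], payload)
    c2, err := cac.NewWithDataSpan(withSpan)
    if err != nil {
        panic(err)
    }

    fmt.Println(c1.Address().Equal(c2.Address())) // expected: true
}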
pkg/cac/cac_test.go

 package cac_test

 import (
+    "bytes"
+    "encoding/binary"
+    "errors"
+    "fmt"
+    "strings"
     "testing"

     "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/swarm"
 )

-func TestCac(t *testing.T) {
-    bmtHashOfFoo := "2387e8e7d8a48c2a9339c97c1dc3461a9a7aa07e994c5cb8b38fd7c1b3e6ea48"
-    address := swarm.MustParseHexAddress(bmtHashOfFoo)
-    foo := "foo"
-    c, err := cac.New([]byte(foo))
+func TestNewCAC(t *testing.T) {
+    data := []byte("greaterthanspan")
+    bmtHashOfData := "27913f1bdb6e8e52cbd5a5fd4ab577c857287edf6969b41efe926b51de0f4f23"
+    address := swarm.MustParseHexAddress(bmtHashOfData)
+
+    expectedSpan := make([]byte, swarm.SpanSize)
+    binary.LittleEndian.PutUint64(expectedSpan, uint64(len(data)))
+    expectedContent := append(expectedSpan, data...)
+
+    c, err := cac.New(data)
+    if err != nil {
+        t.Fatal(err)
+    }
+
+    if !c.Address().Equal(address) {
+        t.Fatalf("address mismatch. got %s want %s", c.Address().String(), address.String())
+    }
+
+    if !bytes.Equal(c.Data(), expectedContent) {
+        t.Fatalf("chunk data mismatch. got %x want %x", c.Data(), expectedContent)
+    }
+}
+
+func TestNewWithDataSpan(t *testing.T) {
+    data := []byte("greaterthanspan")
+    bmtHashOfData := "95022e6af5c6d6a564ee55a67f8455a3e18c511b5697c932d9e44f07f2fb8c53"
+    address := swarm.MustParseHexAddress(bmtHashOfData)
+
+    c, err := cac.NewWithDataSpan(data)
     if err != nil {
         t.Fatal(err)
     }
@@ -20,4 +48,56 @@ func TestCac(t *testing.T) {
     if !c.Address().Equal(address) {
         t.Fatalf("address mismatch. got %s want %s", c.Address().String(), address.String())
     }
+
+    if !bytes.Equal(c.Data(), data) {
+        t.Fatalf("chunk data mismatch. got %x want %x", c.Data(), data)
+    }
+}
+
+func TestChunkInvariants(t *testing.T) {
+    chunkerFunc := []struct {
+        name    string
+        chunker func(data []byte) (swarm.Chunk, error)
+    }{
+        {
+            name:    "new cac",
+            chunker: cac.New,
+        },
+        {
+            name:    "new chunk with data span",
+            chunker: cac.NewWithDataSpan,
+        },
+    }
+
+    for _, f := range chunkerFunc {
+        for _, cc := range []struct {
+            name    string
+            data    []byte
+            wantErr error
+        }{
+            {
+                name:    "zero data",
+                data:    []byte{},
+                wantErr: cac.ErrTooShortChunkData,
+            },
+            {
+                name:    "nil",
+                data:    nil,
+                wantErr: cac.ErrTooShortChunkData,
+            },
+            {
+                name:    "too large data chunk",
+                data:    []byte(strings.Repeat("a", swarm.ChunkSize+swarm.SpanSize+1)),
+                wantErr: cac.ErrTooLargeChunkData,
+            },
+        } {
+            testName := fmt.Sprintf("%s-%s", f.name, cc.name)
+            t.Run(testName, func(t *testing.T) {
+                _, err := f.chunker(cc.data)
+                if !errors.Is(err, cc.wantErr) {
+                    t.Fatalf("got %v want %v", err, cc.wantErr)
+                }
+            })
+        }
+    }
+}
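Aside (not part of the commit): outside the package, the size invariants exercised by TestChunkInvariants surface only as non-nil errors; the sentinel values themselves are re-exported for the package's own tests in pkg/cac/export_test.go, shown next. A short hedged sketch of the externally visible behaviour; the sample inputs are assumptions.

package main

import (
    "fmt"
    "strings"

    "github.com/ethersphere/bee/pkg/cac"
    "github.com/ethersphere/bee/pkg/swarm"
)

func main() {
    // Empty (or nil) payloads are rejected.
    if _, err := cac.New(nil); err != nil {
        fmt.Println("empty payload rejected:", err)
    }
    // Payloads larger than swarm.ChunkSize are rejected as well.
    tooLarge := []byte(strings.Repeat("a", swarm.ChunkSize+1))
    if _, err := cac.New(tooLarge); err != nil {
        fmt.Println("oversized payload rejected:", err)
    }
}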
pkg/cac/export_test.go (new file, 0 → 100644)

+// Copyright 2021 The Swarm Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cac
+
+var (
+    ErrTooShortChunkData = errTooShortChunkData
+    ErrTooLargeChunkData = errTooLargeChunkData
+)
pkg/feeds/putter.go

@@ -8,7 +8,7 @@ import (
     "context"
     "encoding/binary"

-    "github.com/ethersphere/bee/pkg/bmtpool"
+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/crypto"
     "github.com/ethersphere/bee/pkg/soc"
     "github.com/ethersphere/bee/pkg/storage"
@@ -57,21 +57,7 @@ func (u *Putter) Put(ctx context.Context, i Index, at int64, payload []byte) err
 }

 func toChunk(at uint64, payload []byte) (swarm.Chunk, error) {
-    hasher := bmtpool.Get()
-    defer bmtpool.Put(hasher)
-
     ts := make([]byte, 8)
     binary.BigEndian.PutUint64(ts, at)
-    content := append(ts, payload...)
-
-    _, err := hasher.Write(content)
-    if err != nil {
-        return nil, err
-    }
-    span := make([]byte, 8)
-    binary.LittleEndian.PutUint64(span, uint64(len(content)))
-    err = hasher.SetSpanBytes(span)
-    if err != nil {
-        return nil, err
-    }
-    return swarm.NewChunk(swarm.NewAddress(hasher.Sum(nil)), append(append([]byte{}, span...), content...)), nil
+    return cac.New(append(ts, payload...))
 }
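Aside (not part of the commit): toChunk now builds the feed chunk body as an 8-byte big-endian timestamp followed by the payload and hands the result to cac.New, which prepends the little-endian span. A hedged sketch of that layout outside the Putter plumbing; the timestamp value and program scaffolding are assumptions.

package main

import (
    "encoding/binary"
    "fmt"

    "github.com/ethersphere/bee/pkg/cac"
)

func main() {
    at := uint64(1613779200)         // hypothetical timestamp
    payload := []byte("feed update") // hypothetical payload

    // 8-byte big-endian timestamp prefix, as in toChunk above.
    ts := make([]byte, 8)
    binary.BigEndian.PutUint64(ts, at)

    ch, err := cac.New(append(ts, payload...))
    if err != nil {
        panic(err)
    }
    fmt.Println(ch.Address(), len(ch.Data())) // chunk data is span || ts || payload
}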
pkg/file/splitter/internal/job.go

@@ -10,7 +10,7 @@ import (
     "errors"
     "fmt"

-    "github.com/ethersphere/bee/pkg/bmtpool"
+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/encryption"
     "github.com/ethersphere/bee/pkg/file"
     "github.com/ethersphere/bee/pkg/sctx"
@@ -136,9 +136,8 @@ func (s *SimpleSplitterJob) sumLevel(lvl int) ([]byte, error) {
     span := (s.length-1)%spanSize + 1

     var chunkData []byte
-    var addr swarm.Address

-    head := make([]byte, 8)
+    head := make([]byte, swarm.SpanSize)
     binary.LittleEndian.PutUint64(head, uint64(span))
     tail := s.buffer[s.cursors[lvl+1]:s.cursors[lvl]]
     chunkData = append(head, tail...)
@@ -157,29 +156,14 @@ func (s *SimpleSplitterJob) sumLevel(lvl int) ([]byte, error) {
         }
     }

-    hasher := bmtpool.Get()
-
-    err = hasher.SetSpanBytes(c[:8])
-    if err != nil {
-        bmtpool.Put(hasher)
-        return nil, err
-    }
-    _, err = hasher.Write(c[8:])
+    ch, err := cac.NewWithDataSpan(c)
     if err != nil {
-        bmtpool.Put(hasher)
         return nil, err
     }
-    ref := hasher.Sum(nil)
-    bmtpool.Put(hasher)
-    addr = swarm.NewAddress(ref)

     // Add tag to the chunk if tag is valid
-    var ch swarm.Chunk
     if s.tag != nil {
-        ch = swarm.NewChunk(addr, c).WithTagID(s.tag.Uid)
-    } else {
-        ch = swarm.NewChunk(addr, c)
+        ch = ch.WithTagID(s.tag.Uid)
     }

     seen, err := s.putter.Put(s.ctx, ch)
pkg/soc/export_test.go

@@ -5,7 +5,6 @@
 package soc

 var (
     ToSignDigest   = toSignDigest
     RecoverAddress = recoverAddress
-    ContentAddressedChunk = contentAddressedChunk
 )
pkg/soc/soc.go

@@ -11,7 +11,7 @@ import (
     "errors"
     "fmt"

-    "github.com/ethersphere/bee/pkg/bmtpool"
+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/crypto"
     "github.com/ethersphere/bee/pkg/swarm"
 )
@@ -151,10 +151,7 @@ func FromChunk(sch swarm.Chunk) (*Soc, error) {
     s.signature = chunkData[cursor : cursor+SignatureSize]
     cursor += SignatureSize

-    spanBytes := chunkData[cursor : cursor+swarm.SpanSize]
-    cursor += swarm.SpanSize
-
-    ch, err := contentAddressedChunk(chunkData[cursor:], spanBytes)
+    ch, err := cac.NewWithDataSpan(chunkData[cursor:])
     if err != nil {
         return nil, err
     }
@@ -258,23 +255,3 @@ func recoverAddress(signature, digest []byte) ([]byte, error) {
     }
     return recoveredEthereumAddress, nil
 }
-
-func contentAddressedChunk(data, spanBytes []byte) (swarm.Chunk, error) {
-    hasher := bmtpool.Get()
-    defer bmtpool.Put(hasher)
-
-    // execute hash, compare and return result
-    err := hasher.SetSpanBytes(spanBytes)
-    if err != nil {
-        return nil, err
-    }
-    _, err = hasher.Write(data)
-    if err != nil {
-        return nil, err
-    }
-    s := hasher.Sum(nil)
-
-    payload := append(append([]byte{}, spanBytes...), data...)
-    address := swarm.NewAddress(s)
-
-    return swarm.NewChunk(address, payload), nil
-}
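Aside (not part of the commit): cac.NewWithDataSpan keeps the span inside the returned chunk's Data(), which is why FromChunk no longer slices the span off before rebuilding the inner content-addressed chunk. A hedged round-trip sketch; the sample payload is an assumption.

package main

import (
    "bytes"
    "encoding/binary"
    "fmt"

    "github.com/ethersphere/bee/pkg/cac"
    "github.com/ethersphere/bee/pkg/swarm"
)

func main() {
    payload := []byte("inner payload") // hypothetical payload
    data := make([]byte, swarm.SpanSize+len(payload))
    binary.LittleEndian.PutUint64(data, uint64(len(payload)))
    copy(data[swarm.SpanSize:], payload)

    ch, err := cac.NewWithDataSpan(data)
    if err != nil {
        panic(err)
    }
    // The chunk's data is exactly span || payload, unchanged.
    fmt.Println(bytes.Equal(ch.Data(), data)) // expected: true
}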
pkg/soc/soc_test.go

@@ -9,6 +9,7 @@ import (
     "encoding/binary"
     "testing"

+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/crypto"
     "github.com/ethersphere/bee/pkg/soc"
     "github.com/ethersphere/bee/pkg/swarm"
@@ -26,7 +27,7 @@ func TestToChunk(t *testing.T) {
     id := make([]byte, 32)
     payload := []byte("foo")

-    ch, err := chunk(payload)
+    ch, err := cac.New(payload)
     if err != nil {
         t.Fatal(err)
     }
@@ -93,7 +94,7 @@ func TestFromChunk(t *testing.T) {
     id := make([]byte, 32)
     payload := []byte("foo")

-    ch, err := chunk(payload)
+    ch, err := cac.New(payload)
     if err != nil {
         t.Fatal(err)
     }
@@ -121,9 +122,3 @@ func TestFromChunk(t *testing.T) {
         t.Fatalf("owner address mismatch %x %x", ownerEthereumAddress, u2.OwnerAddress())
     }
 }
-
-func chunk(data []byte) (swarm.Chunk, error) {
-    span := make([]byte, swarm.SpanSize)
-    binary.LittleEndian.PutUint64(span, uint64(len(data)))
-    return soc.ContentAddressedChunk(data, span)
-}
pkg/storage/testing/chunk.go

@@ -17,11 +17,10 @@
 package testing

 import (
-    "encoding/binary"
     "math/rand"
     "time"

-    "github.com/ethersphere/bee/pkg/bmtpool"
+    "github.com/ethersphere/bee/pkg/cac"
     "github.com/ethersphere/bee/pkg/swarm"
 )
@@ -55,24 +54,8 @@ func init() {
 func GenerateTestRandomChunk() swarm.Chunk {
     data := make([]byte, swarm.ChunkSize)
     _, _ = rand.Read(data)
-    span := make([]byte, swarm.SpanSize)
-    binary.LittleEndian.PutUint64(span, uint64(len(data)))
-    data = append(span, data...)
-
-    hasher := bmtpool.Get()
-    defer bmtpool.Put(hasher)
-
-    err := hasher.SetSpanBytes(data[:swarm.SpanSize])
-    if err != nil {
-        panic(err)
-    }
-    _, err = hasher.Write(data[swarm.SpanSize:])
-    if err != nil {
-        panic(err)
-    }
-    ref := hasher.Sum(nil)
-    return swarm.NewChunk(swarm.NewAddress(ref), data)
+    ch, _ := cac.New(data)
+    return ch
 }

 // GenerateTestRandomInvalidChunk generates a random, however invalid, content