vicotor / mybee · Commits

Commit 1236d45d (unverified)
authored Oct 14, 2020 by acud, committed by GitHub on Oct 14, 2020
parent 61c70837

all: use bmt hasher pool (#823)

* use bmtpool
Showing 11 changed files with 84 additions and 65 deletions (+84 -65)
go.mod                                +1  -1
go.sum                                +2  -2
pkg/bmtpool/bmtpool.go                +28 -0
pkg/content/content.go                +5  -5
pkg/file/pipeline/bmt/bmt.go          +10 -15
pkg/file/pipeline/bmt/bmt_test.go     +2  -2
pkg/file/pipeline/builder/builder.go  +4  -4
pkg/file/splitter/internal/job.go     +15 -20
pkg/node/node.go                      +13 -13
pkg/pss/trojan.go                     +3  -3
pkg/swarm/swarm.go                    +1  -0
go.mod

@@ -8,7 +8,7 @@ require (
 	github.com/coreos/go-semver v0.3.0
 	github.com/davidlazar/go-crypto v0.0.0-20200604182044-b73af7476f6c // indirect
 	github.com/ethereum/go-ethereum v1.9.20
-	github.com/ethersphere/bmt v0.1.2
+	github.com/ethersphere/bmt v0.1.4
 	github.com/ethersphere/langos v1.0.0
 	github.com/ethersphere/manifest v0.3.2
 	github.com/ethersphere/sw3-bindings/v2 v2.1.0
go.sum

@@ -163,8 +163,8 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
 github.com/ethereum/go-ethereum v1.9.14/go.mod h1:oP8FC5+TbICUyftkTWs+8JryntjIJLJvWvApK3z2AYw=
 github.com/ethereum/go-ethereum v1.9.20 h1:kk/J5OIoaoz3DRrCXznz3RGi212mHHXwzXlY/ZQxcj0=
 github.com/ethereum/go-ethereum v1.9.20/go.mod h1:JSSTypSMTkGZtAdAChH2wP5dZEvPGh3nUTuDpH+hNrg=
-github.com/ethersphere/bmt v0.1.2 h1:FEuvQY9xuK+rDp3VwDVyde8T396Matv/u9PdtKa2r9Q=
-github.com/ethersphere/bmt v0.1.2/go.mod h1:fqRBDmYwn3lX2MH4lkImXQgFWeNP8ikLkS/hgi/HRws=
+github.com/ethersphere/bmt v0.1.4 h1:+rkWYNtMgDx6bkNqGdWu+U9DgGI1rRZplpSW3YhBr1Q=
+github.com/ethersphere/bmt v0.1.4/go.mod h1:Yd8ft1U69WDuHevZc/rwPxUv1rzPSMpMnS6xbU53aY8=
 github.com/ethersphere/langos v1.0.0 h1:NBtNKzXTTRSue95uOlzPN4py7Aofs0xWPzyj4AI1Vcc=
 github.com/ethersphere/langos v1.0.0/go.mod h1:dlcN2j4O8sQ+BlCaxeBu43bgr4RQ+inJ+pHwLeZg5Tw=
 github.com/ethersphere/manifest v0.3.2 h1:IusNNfpqde2F7uWZ2DE9eyo9PMwUAMop3Ws1NBcdMyM=
pkg/bmtpool/bmtpool.go (new file, mode 100644)

// Copyright 2020 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package bmtpool

import (
	"github.com/ethersphere/bee/pkg/swarm"

	bmtlegacy "github.com/ethersphere/bmt/legacy"
	"github.com/ethersphere/bmt/pool"
)

var instance pool.Pooler

func init() {
	instance = pool.New(8, swarm.BmtBranches)
}

// Get a bmt Hasher instance.
// Instances are reset before being returned to the caller.
func Get() *bmtlegacy.Hasher {
	return instance.Get()
}

// Put a bmt Hasher back into the pool
func Put(h *bmtlegacy.Hasher) {
	instance.Put(h)
}
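
For orientation, a minimal usage sketch of the new pool, following the Get/Put pattern adopted throughout this commit. The hashChunk helper and the main function are hypothetical; they assume only the bmtpool API above, the Hasher methods (SetSpanBytes, Write, Sum) and swarm helpers that appear elsewhere in this diff, and a little-endian span encoding, which is an assumption.

package main

import (
	"encoding/binary"
	"fmt"

	"github.com/ethersphere/bee/pkg/bmtpool"
	"github.com/ethersphere/bee/pkg/swarm"
)

// hashChunk computes the BMT hash of a single chunk payload with the given span.
// It borrows a hasher from the shared pool and hands it back when done.
func hashChunk(span uint64, data []byte) (swarm.Address, error) {
	hasher := bmtpool.Get()
	defer bmtpool.Put(hasher) // always return the instance to the pool

	spanBytes := make([]byte, swarm.SpanSize)
	binary.LittleEndian.PutUint64(spanBytes, span) // assumed span encoding
	if err := hasher.SetSpanBytes(spanBytes); err != nil {
		return swarm.Address{}, err
	}
	if _, err := hasher.Write(data); err != nil {
		return swarm.Address{}, err
	}
	return swarm.NewAddress(hasher.Sum(nil)), nil
}

func main() {
	addr, err := hashChunk(5, []byte("hello"))
	if err != nil {
		panic(err)
	}
	fmt.Println(addr)
}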
pkg/content/content.go

@@ -10,8 +10,8 @@ import (
 	"errors"
 	"fmt"

+	"github.com/ethersphere/bee/pkg/bmtpool"
 	"github.com/ethersphere/bee/pkg/swarm"
-	bmtlegacy "github.com/ethersphere/bmt/legacy"
 )

 // NewChunk creates a new content-addressed single-span chunk.
@@ -29,8 +29,8 @@ func NewChunkWithSpan(data []byte, span int64) (swarm.Chunk, error) {
 		return nil, fmt.Errorf("single-span chunk size mismatch; span is %d, chunk data length %d", span, len(data))
 	}

-	bmtPool := bmtlegacy.NewTreePool(swarm.NewHasher, swarm.Branches, bmtlegacy.PoolSize)
-	hasher := bmtlegacy.New(bmtPool)
+	hasher := bmtpool.Get()
+	defer bmtpool.Put(hasher)

 	// execute hash, compare and return result
 	spanBytes := make([]byte, 8)
@@ -53,8 +53,8 @@ func NewChunkWithSpan(data []byte, span int64) (swarm.Chunk, error) {
 // NewChunkWithSpanBytes deserializes a content-addressed chunk from separate
 // data and span byte slices.
 func NewChunkWithSpanBytes(data, spanBytes []byte) (swarm.Chunk, error) {
-	bmtPool := bmtlegacy.NewTreePool(swarm.NewHasher, swarm.Branches, bmtlegacy.PoolSize)
-	hasher := bmtlegacy.New(bmtPool)
+	hasher := bmtpool.Get()
+	defer bmtpool.Put(hasher)

 	// execute hash, compare and return result
 	err := hasher.SetSpanBytes(spanBytes)
pkg/file/pipeline/bmt/bmt.go

@@ -6,13 +6,10 @@ package bmt
 import (
 	"errors"
-	"hash"

+	"github.com/ethersphere/bee/pkg/bmtpool"
 	"github.com/ethersphere/bee/pkg/file/pipeline"
 	"github.com/ethersphere/bee/pkg/swarm"
-	"github.com/ethersphere/bmt"
-	bmtlegacy "github.com/ethersphere/bmt/legacy"
-	"golang.org/x/crypto/sha3"
 )

 var (
@@ -20,15 +17,13 @@ var (
 )

 type bmtWriter struct {
-	b    bmt.Hash
 	next pipeline.ChainWriter
 }

 // NewBmtWriter returns a new bmtWriter. Partial writes are not supported.
 // Note: branching factor is the BMT branching factor, not the merkle trie branching factor.
-func NewBmtWriter(branches int, next pipeline.ChainWriter) pipeline.ChainWriter {
+func NewBmtWriter(next pipeline.ChainWriter) pipeline.ChainWriter {
 	return &bmtWriter{
-		b:    bmtlegacy.New(bmtlegacy.NewTreePool(hashFunc, branches, bmtlegacy.PoolSize)),
 		next: next,
 	}
 }
@@ -39,16 +34,20 @@ func (w *bmtWriter) ChainWrite(p *pipeline.PipeWriteArgs) error {
 	if len(p.Data) < swarm.SpanSize {
 		return errInvalidData
 	}
-	w.b.Reset()
-	err := w.b.SetSpanBytes(p.Data[:swarm.SpanSize])
+	hasher := bmtpool.Get()
+	err := hasher.SetSpanBytes(p.Data[:swarm.SpanSize])
 	if err != nil {
+		bmtpool.Put(hasher)
 		return err
 	}
-	_, err = w.b.Write(p.Data[swarm.SpanSize:])
+	_, err = hasher.Write(p.Data[swarm.SpanSize:])
 	if err != nil {
+		bmtpool.Put(hasher)
 		return err
 	}
-	p.Ref = w.b.Sum(nil)
+	p.Ref = hasher.Sum(nil)
+	bmtpool.Put(hasher)

 	return w.next.ChainWrite(p)
 }
@@ -56,7 +55,3 @@ func (w *bmtWriter) ChainWrite(p *pipeline.PipeWriteArgs) error {
 func (w *bmtWriter) Sum() ([]byte, error) {
 	return w.next.Sum()
 }
-
-func hashFunc() hash.Hash {
-	return sha3.NewLegacyKeccak256()
-}
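
Note the design choice above: the writer no longer owns a hasher, and ChainWrite releases the pooled instance explicitly on every return path rather than deferring the Put, so the hasher goes back to the pool before control passes to the next writer in the chain. For comparison, a defer-based variant is sketched below; it is a hypothetical rewrite, not part of this commit, and uses only identifiers already present in the file above.

// chainWriteDeferred is a hypothetical variant of ChainWrite that relies on
// defer. The trade-off: the pooled hasher is held until the entire downstream
// chain (w.next.ChainWrite) has returned, keeping it out of the pool longer.
func (w *bmtWriter) chainWriteDeferred(p *pipeline.PipeWriteArgs) error {
	if len(p.Data) < swarm.SpanSize {
		return errInvalidData
	}
	hasher := bmtpool.Get()
	defer bmtpool.Put(hasher) // released only after the whole chain below returns

	if err := hasher.SetSpanBytes(p.Data[:swarm.SpanSize]); err != nil {
		return err
	}
	if _, err := hasher.Write(p.Data[swarm.SpanSize:]); err != nil {
		return err
	}
	p.Ref = hasher.Sum(nil)
	return w.next.ChainWrite(p)
}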
pkg/file/pipeline/bmt/bmt_test.go

@@ -47,7 +47,7 @@ func TestBmtWriter(t *testing.T) {
 	} {
 		t.Run(tc.name, func(t *testing.T) {
 			mockChainWriter := mock.NewChainWriter()
-			writer := bmt.NewBmtWriter(128, mockChainWriter)
+			writer := bmt.NewBmtWriter(mockChainWriter)

 			var data []byte
@@ -81,7 +81,7 @@ func TestBmtWriter(t *testing.T) {
 // TestSum tests that calling Sum on the writer calls the next writer's Sum.
 func TestSum(t *testing.T) {
 	mockChainWriter := mock.NewChainWriter()
-	writer := bmt.NewBmtWriter(128, mockChainWriter)
+	writer := bmt.NewBmtWriter(mockChainWriter)
 	_, err := writer.Sum()
 	if err != nil {
 		t.Fatal(err)
pkg/file/pipeline/builder/builder.go

@@ -34,7 +34,7 @@ func NewPipelineBuilder(ctx context.Context, s storage.Storer, mode storage.Mode
 func newPipeline(ctx context.Context, s storage.Storer, mode storage.ModePut) pipeline.Interface {
 	tw := hashtrie.NewHashTrieWriter(swarm.ChunkSize, swarm.Branches, swarm.HashSize, newShortPipelineFunc(ctx, s, mode))
 	lsw := store.NewStoreWriter(ctx, s, mode, tw)
-	b := bmt.NewBmtWriter(128, lsw)
+	b := bmt.NewBmtWriter(lsw)
 	return feeder.NewChunkFeederWriter(swarm.ChunkSize, b)
 }
@@ -43,7 +43,7 @@ func newPipeline(ctx context.Context, s storage.Storer, mode storage.ModePut) pi
 func newShortPipelineFunc(ctx context.Context, s storage.Storer, mode storage.ModePut) func() pipeline.ChainWriter {
 	return func() pipeline.ChainWriter {
 		lsw := store.NewStoreWriter(ctx, s, mode, nil)
-		return bmt.NewBmtWriter(128, lsw)
+		return bmt.NewBmtWriter(lsw)
 	}
 }
@@ -55,7 +55,7 @@ func newShortPipelineFunc(ctx context.Context, s storage.Storer, mode storage.Mo
 func newEncryptionPipeline(ctx context.Context, s storage.Storer, mode storage.ModePut) pipeline.Interface {
 	tw := hashtrie.NewHashTrieWriter(swarm.ChunkSize, 64, swarm.HashSize+encryption.KeyLength, newShortEncryptionPipelineFunc(ctx, s, mode))
 	lsw := store.NewStoreWriter(ctx, s, mode, tw)
-	b := bmt.NewBmtWriter(128, lsw)
+	b := bmt.NewBmtWriter(lsw)
 	enc := enc.NewEncryptionWriter(encryption.NewChunkEncrypter(), b)
 	return feeder.NewChunkFeederWriter(swarm.ChunkSize, enc)
 }
@@ -65,7 +65,7 @@ func newEncryptionPipeline(ctx context.Context, s storage.Storer, mode storage.M
 func newShortEncryptionPipelineFunc(ctx context.Context, s storage.Storer, mode storage.ModePut) func() pipeline.ChainWriter {
 	return func() pipeline.ChainWriter {
 		lsw := store.NewStoreWriter(ctx, s, mode, nil)
-		b := bmt.NewBmtWriter(128, lsw)
+		b := bmt.NewBmtWriter(lsw)
 		return enc.NewEncryptionWriter(encryption.NewChunkEncrypter(), b)
 	}
 }
pkg/file/splitter/internal/job.go

@@ -9,15 +9,13 @@ import (
 	"encoding/binary"
 	"errors"
 	"fmt"
-	"hash"

+	"github.com/ethersphere/bee/pkg/bmtpool"
 	"github.com/ethersphere/bee/pkg/encryption"
 	"github.com/ethersphere/bee/pkg/file"
 	"github.com/ethersphere/bee/pkg/sctx"
 	"github.com/ethersphere/bee/pkg/swarm"
 	"github.com/ethersphere/bee/pkg/tags"
-	"github.com/ethersphere/bmt"
-	bmtlegacy "github.com/ethersphere/bmt/legacy"
 	"golang.org/x/crypto/sha3"
 )
@@ -29,11 +27,6 @@ type Putter interface {
 // (128 ^ (9 - 1)) * 4096 = 295147905179352825856 bytes
 const levelBufferLimit = 9

-// hashFunc is a hasher factory used by the bmt hasher
-func hashFunc() hash.Hash {
-	return sha3.NewLegacyKeccak256()
-}
-
 // SimpleSplitterJob encapsulated a single splitter operation, accepting blockwise
 // writes of data whose length is defined in advance.
 //
@@ -46,12 +39,11 @@ func hashFunc() hash.Hash {
 type SimpleSplitterJob struct {
 	ctx        context.Context
 	putter     Putter
 	spanLength int64 // target length of data
 	length     int64 // number of bytes written to the data level of the hasher
 	sumCounts  []int // number of sums performed, indexed per level
 	cursors    []int // section write position, indexed per level
-	hasher     bmt.Hash // underlying hasher used for hashing the tree
 	buffer     []byte // keeps data and hashes, indexed by cursors
 	tag        *tags.Tag
 	toEncrypt  bool // to encryrpt the chunks or not
 	refSize    int64
@@ -66,7 +58,6 @@ func NewSimpleSplitterJob(ctx context.Context, putter Putter, spanLength int64,
 	if toEncrypt {
 		refSize += encryption.KeyLength
 	}
-	p := bmtlegacy.NewTreePool(hashFunc, swarm.Branches, bmtlegacy.PoolSize)

 	return &SimpleSplitterJob{
 		ctx:        ctx,
@@ -74,7 +65,6 @@ func NewSimpleSplitterJob(ctx context.Context, putter Putter, spanLength int64,
 		spanLength: spanLength,
 		sumCounts:  make([]int, levelBufferLimit),
 		cursors:    make([]int, levelBufferLimit),
-		hasher:     bmtlegacy.New(p),
 		buffer:     make([]byte, swarm.ChunkWithSpanSize*levelBufferLimit*2), // double size as temp workaround for weak calculation of needed buffer space
 		tag:        sctx.GetTag(ctx),
 		toEncrypt:  toEncrypt,
@@ -167,16 +157,21 @@ func (s *SimpleSplitterJob) sumLevel(lvl int) ([]byte, error) {
 		}
 	}

-	s.hasher.Reset()
-	err = s.hasher.SetSpanBytes(c[:8])
+	hasher := bmtpool.Get()
+	err = hasher.SetSpanBytes(c[:8])
 	if err != nil {
+		bmtpool.Put(hasher)
 		return nil, err
 	}
-	_, err = s.hasher.Write(c[8:])
+	_, err = hasher.Write(c[8:])
 	if err != nil {
+		bmtpool.Put(hasher)
 		return nil, err
 	}
-	ref := s.hasher.Sum(nil)
+	ref := hasher.Sum(nil)
+	bmtpool.Put(hasher)
 	addr = swarm.NewAddress(ref)

 	// Add tag to the chunk if tag is valid
pkg/node/node.go

@@ -345,8 +345,8 @@ func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey,
 	chunkvalidator := swarm.NewChunkValidator(content.NewValidator(), soc.NewValidator())
 	retrieve := retrieval.New(swarmAddress, storer, p2ps, kad, logger, acc, accounting.NewFixedPricer(swarmAddress, 10), chunkvalidator, tracer)
-	tagg := tags.NewTags(stateStore, logger)
-	b.tagsCloser = tagg
+	tagService := tags.NewTags(stateStore, logger)
+	b.tagsCloser = tagService

 	if err = p2ps.AddProtocol(retrieve.Protocol()); err != nil {
 		return nil, fmt.Errorf("retrieval service: %w", err)
@@ -358,22 +358,22 @@ func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey,
 		return nil, fmt.Errorf("swarm key: %w", err)
 	}
-	psss := pss.New(swarmPrivateKey, logger)
-	b.pssCloser = psss
+	pssService := pss.New(swarmPrivateKey, logger)
+	b.pssCloser = pssService

 	var ns storage.Storer
 	if o.GlobalPinningEnabled {
 		// create recovery callback for content repair
-		recoverFunc := recovery.NewRecoveryHook(psss)
+		recoverFunc := recovery.NewRecoveryHook(pssService)
 		ns = netstore.New(storer, recoverFunc, retrieve, logger, chunkvalidator)
 	} else {
 		ns = netstore.New(storer, nil, retrieve, logger, chunkvalidator)
 	}

-	pushSyncProtocol := pushsync.New(p2ps, storer, kad, tagg, psss.TryUnwrap, logger, acc, accounting.NewFixedPricer(swarmAddress, 10), tracer)
+	pushSyncProtocol := pushsync.New(p2ps, storer, kad, tagService, pssService.TryUnwrap, logger, acc, accounting.NewFixedPricer(swarmAddress, 10), tracer)

 	// set the pushSyncer in the PSS
-	psss.SetPushSyncer(pushSyncProtocol)
+	pssService.SetPushSyncer(pushSyncProtocol)

 	if err = p2ps.AddProtocol(pushSyncProtocol.Protocol()); err != nil {
 		return nil, fmt.Errorf("pushsync service: %w", err)
@@ -382,10 +382,10 @@ func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey,
 	if o.GlobalPinningEnabled {
 		// register function for chunk repair upon receiving a trojan message
 		chunkRepairHandler := recovery.NewRepairHandler(ns, logger, pushSyncProtocol)
-		b.recoveryHandleCleanup = psss.Register(recovery.RecoveryTopic, chunkRepairHandler)
+		b.recoveryHandleCleanup = pssService.Register(recovery.RecoveryTopic, chunkRepairHandler)
 	}

-	pushSyncPusher := pusher.New(storer, kad, pushSyncProtocol, tagg, logger, tracer)
+	pushSyncPusher := pusher.New(storer, kad, pushSyncProtocol, tagService, logger, tracer)
 	b.pusherCloser = pushSyncPusher

 	pullStorage := pullstorage.New(storer)
@@ -410,7 +410,7 @@ func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey,
 	var apiService api.Service
 	if o.APIAddr != "" {
 		// API server
-		apiService = api.New(tagg, ns, multiResolver, psss, logger, tracer, api.Options{
+		apiService = api.New(tagService, ns, multiResolver, pssService, logger, tracer, api.Options{
 			CORSAllowedOrigins: o.CORSAllowedOrigins,
 			GatewayMode:        o.GatewayMode,
 			WsPingPeriod:       60 * time.Second,
@@ -441,7 +441,7 @@ func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey,
 	if o.DebugAPIAddr != "" {
 		// Debug API server
-		debugAPIService := debugapi.New(swarmAddress, publicKey, overlayEthAddress, p2ps, pingPong, kad, storer, logger, tracer, tagg, acc, settlement, o.SwapEnable, swapService, chequebookService)
+		debugAPIService := debugapi.New(swarmAddress, publicKey, overlayEthAddress, p2ps, pingPong, kad, storer, logger, tracer, tagService, acc, settlement, o.SwapEnable, swapService, chequebookService)
 		// register metrics from components
 		debugAPIService.MustRegisterMetrics(p2ps.Metrics()...)
 		debugAPIService.MustRegisterMetrics(pingPong.Metrics()...)
@@ -452,8 +452,8 @@ func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey,
 		debugAPIService.MustRegisterMetrics(pushSyncPusher.Metrics()...)
 		debugAPIService.MustRegisterMetrics(pullSync.Metrics()...)

-		if pssService, ok := psss.(metrics.Collector); ok {
-			debugAPIService.MustRegisterMetrics(pssService.Metrics()...)
+		if pssServiceMetrics, ok := pssService.(metrics.Collector); ok {
+			debugAPIService.MustRegisterMetrics(pssServiceMetrics.Metrics()...)
 		}

 		if apiService != nil {
pkg/pss/trojan.go

@@ -15,11 +15,11 @@ import (
 	random "math/rand"

 	"github.com/btcsuite/btcd/btcec"
+	"github.com/ethersphere/bee/pkg/bmtpool"
 	"github.com/ethersphere/bee/pkg/crypto"
 	"github.com/ethersphere/bee/pkg/encryption"
 	"github.com/ethersphere/bee/pkg/encryption/elgamal"
 	"github.com/ethersphere/bee/pkg/swarm"
-	bmtlegacy "github.com/ethersphere/bmt/legacy"
 )

 var (
@@ -176,10 +176,10 @@ func checkTargets(targets Targets) error {
 }

 func hasher(span, b []byte) func([]byte) ([]byte, error) {
-	hashPool := bmtlegacy.NewTreePool(swarm.NewHasher, swarm.Branches, bmtlegacy.PoolSize)
 	return func(nonce []byte) ([]byte, error) {
 		s := append(nonce, b...)
-		hasher := bmtlegacy.New(hashPool)
+		hasher := bmtpool.Get()
+		defer bmtpool.Put(hasher)
 		if err := hasher.SetSpanBytes(span); err != nil {
 			return nil, err
 		}
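
The closure returned by hasher is invoked once per candidate nonce while mining a trojan chunk, so each invocation now borrows a hasher from the shared pool instead of building a dedicated tree pool per message. A minimal sketch of that calling pattern follows; the mine helper, its parameters, and the prefix check are hypothetical and assume only the bmtpool API introduced in this commit.

package mining

import (
	"bytes"
	"errors"

	"github.com/ethersphere/bee/pkg/bmtpool"
)

// mine tries candidate nonces until the resulting BMT hash starts with target.
// Each call to the hash closure borrows a pooled hasher and returns it via defer,
// mirroring the pattern adopted in trojan.go.
func mine(span, payload []byte, nonces [][]byte, target []byte) ([]byte, error) {
	hash := func(nonce []byte) ([]byte, error) {
		s := append(nonce, payload...)
		h := bmtpool.Get()
		defer bmtpool.Put(h)
		if err := h.SetSpanBytes(span); err != nil {
			return nil, err
		}
		if _, err := h.Write(s); err != nil {
			return nil, err
		}
		return h.Sum(nil), nil
	}
	for _, n := range nonces {
		res, err := hash(n)
		if err != nil {
			return nil, err
		}
		if bytes.HasPrefix(res, target) {
			return n, nil
		}
	}
	return nil, errors.New("no nonce matched the target prefix")
}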
pkg/swarm/swarm.go

@@ -18,6 +18,7 @@ const (
 	SpanSize    = 8
 	SectionSize = 32
 	Branches    = 128
+	BmtBranches = 128
 	ChunkSize   = SectionSize * Branches
 	HashSize    = 32
 	MaxPO uint8 = 15