vicotor / mybee / Commits

Commit dfa4c0dc (Unverified)
Authored Apr 09, 2020 by Zahoor Mohamed; committed by GitHub on Apr 09, 2020
P4 - Replace log from ethereum to bee's own log library (#74)
Parent: 852fbd93

Showing 13 changed files with 159 additions and 75 deletions (+159 / -75)
pkg/shed/db.go                   +14  -5
pkg/shed/db_test.go              +9   -5
pkg/shed/example_store_test.go   +9   -7
pkg/shed/field_string.go         +9   -5
pkg/shed/field_string_test.go    +4   -1
pkg/shed/field_struct.go         +12  -5
pkg/shed/field_struct_test.go    +4   -2
pkg/shed/field_uint64.go         +17  -5
pkg/shed/field_uint64_test.go    +12  -10
pkg/shed/index.go                +27  -1
pkg/shed/index_test.go           +18  -14
pkg/shed/vector_uint64.go        +12  -5
pkg/shed/vector_uint64_test.go   +12  -10
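For orientation, the net effect on callers: shed.NewDB no longer takes a metrics prefix but a logging.Logger, and every field, index, and vector constructor takes the logger as an extra trailing argument. Below is a minimal sketch of the new calling convention, modelled on the updated tests in this commit; the temporary path, the discarded output, and verbosity 0 are illustrative choices, not part of the commit.

package main

import (
	"io/ioutil"
	"log"

	"github.com/ethersphere/bee/pkg/logging"
	"github.com/ethersphere/bee/pkg/shed"
)

func main() {
	// Build a logger the same way the updated tests do; any io.Writer and
	// verbosity can be used instead of discarding output.
	logger := logging.New(ioutil.Discard, 0)

	// Before this commit the call was shed.NewDB(path, "") with a metrics
	// prefix; after it, the logger is passed instead. The path here is a
	// hypothetical example location.
	db, err := shed.NewDB("/tmp/shed-example", logger)
	if err != nil {
		log.Fatal(err)
	}

	// Field constructors now also take the logger as a trailing argument.
	schemaName, err := db.NewStringField("schema-name", logger)
	if err != nil {
		log.Fatal(err)
	}
	if err := schemaName.Put("example"); err != nil {
		log.Fatal(err)
	}
}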
pkg/shed/db.go

@@ -23,13 +23,14 @@
 package shed

 import (
+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 	"github.com/syndtr/goleveldb/leveldb/iterator"
 	"github.com/syndtr/goleveldb/leveldb/opt"
 )

 const (
-	openFileLimit = 128 // The limit for LevelDB OpenFilesCacheCapacity.
+	openFileLimit = 128 // The limit for LevelDB OpenFilesCacheCapacity.
 )

 // DB provides abstractions over LevelDB in order to
@@ -37,15 +38,16 @@ const (
 // It provides a schema functionality to store fields and indexes
 // information about naming and types.
 type DB struct {
-	ldb *leveldb.DB
+	ldb     *leveldb.DB
 	metrics metrics
-	quit chan struct{} // Quit channel to stop the metrics collection before closing the database
+	logger  logging.Logger
+	quit    chan struct{} // Quit channel to stop the metrics collection before closing the database
 }

 // NewDB constructs a new DB and validates the schema
 // if it exists in database on the given path.
-// metricsPrefix is used for metrics collection for the given DB.
-func NewDB(path string, metricsPrefix string) (db *DB, err error) {
+func NewDB(path string, logger logging.Logger) (db *DB, err error) {
 	ldb, err := leveldb.OpenFile(path, &opt.Options{
 		OpenFilesCacheCapacity: openFileLimit,
 	})
@@ -53,8 +55,9 @@ func NewDB(path string, metricsPrefix string) (db *DB, err error) {
 		return nil, err
 	}
 	db = &DB{
-		ldb: ldb,
+		ldb:     ldb,
 		metrics: newMetrics(),
+		logger:  logger,
 	}
 	if _, err = db.getSchema(); err != nil {
@@ -81,6 +84,7 @@ func NewDB(path string, metricsPrefix string) (db *DB, err error) {
 func (db *DB) Put(key []byte, value []byte) (err error) {
 	err = db.ldb.Put(key, value, nil)
 	if err != nil {
+		db.logger.Debugf("failed to insert in to DB. Error : %s", err.Error())
 		db.metrics.PutFailCounter.Inc()
 		return err
 	}
@@ -92,9 +96,11 @@ func (db *DB) Put(key []byte, value []byte) (err error) {
 func (db *DB) Get(key []byte) (value []byte, err error) {
 	value, err = db.ldb.Get(key, nil)
 	if err == leveldb.ErrNotFound {
+		db.logger.Debugf("key %s not found during GET", string(key))
 		db.metrics.GetNotFoundCounter.Inc()
 		return nil, err
 	} else {
+		db.logger.Errorf("key %s not found in DB", string(key))
 		db.metrics.GetFailCounter.Inc()
 	}
 	db.metrics.GetCounter.Inc()
@@ -105,6 +111,7 @@ func (db *DB) Get(key []byte) (value []byte, err error) {
 func (db *DB) Has(key []byte) (yes bool, err error) {
 	yes, err = db.ldb.Has(key, nil)
 	if err != nil {
+		db.logger.Debugf("encountered error during HAS of key %s. Error: %s ", string(key), err.Error())
 		db.metrics.HasFailCounter.Inc()
 		return false, err
 	}
@@ -116,6 +123,7 @@ func (db *DB) Has(key []byte) (yes bool, err error) {
 func (db *DB) Delete(key []byte) (err error) {
 	err = db.ldb.Delete(key, nil)
 	if err != nil {
+		db.logger.Debugf("could not DELETE key %s. Error: %s ", string(key), err.Error())
 		db.metrics.DeleteFailCounter.Inc()
 		return err
 	}
@@ -133,6 +141,7 @@ func (db *DB) NewIterator() iterator.Iterator {
 func (db *DB) WriteBatch(batch *leveldb.Batch) (err error) {
 	err = db.ldb.Write(batch, nil)
 	if err != nil {
+		db.logger.Debugf("could not writing batch. Error: %s ", err.Error())
 		db.metrics.WriteBatchFailCounter.Inc()
 		return err
 	}
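The only logging calls this diff relies on are Debugf and Errorf, so the new dependency is narrow. As a sketch for orientation only (the real interface exported by github.com/ethersphere/bee/pkg/logging is larger and is what the code actually imports), the subset exercised here is:

// Sketch: the subset of logging.Logger methods used by the changes in this commit.
type Logger interface {
	Debugf(format string, args ...interface{})
	Errorf(format string, args ...interface{})
}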
pkg/shed/db_test.go

@@ -20,6 +20,8 @@ import (
 	"io/ioutil"
 	"os"
 	"testing"
+
+	"github.com/ethersphere/bee/pkg/logging"
 )

 // TestNewDB constructs a new DB
@@ -54,12 +56,13 @@ func TestDB_persistence(t *testing.T) {
 		t.Fatal(err)
 	}
 	defer os.RemoveAll(dir)
-	db, err := NewDB(dir, "")
+	logger := logging.New(ioutil.Discard, 0)
+	db, err := NewDB(dir, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
-	stringField, err := db.NewStringField("preserve-me")
+	stringField, err := db.NewStringField("preserve-me", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -73,11 +76,11 @@ func TestDB_persistence(t *testing.T) {
 		t.Fatal(err)
 	}
-	db2, err := NewDB(dir, "")
+	db2, err := NewDB(dir, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
-	stringField2, err := db2.NewStringField("preserve-me")
+	stringField2, err := db2.NewStringField("preserve-me", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -100,7 +103,8 @@ func newTestDB(t *testing.T) (db *DB, cleanupFunc func()) {
 	if err != nil {
 		t.Fatal(err)
 	}
-	db, err = NewDB(dir, "")
+	logger := logging.New(ioutil.Discard, 0)
+	db, err = NewDB(dir, logger)
 	if err != nil {
 		os.RemoveAll(dir)
 		t.Fatal(err)
pkg/shed/example_store_test.go

@@ -26,7 +26,8 @@ import (
 	"os"
 	"time"

-	"github.com/ethersphere/swarm/shed"
+	"github.com/ethersphere/bee/pkg/logging"
+	"github.com/ethersphere/bee/pkg/shed"
 	"github.com/ethersphere/swarm/storage"
 	"github.com/syndtr/goleveldb/leveldb"
 )
@@ -51,7 +52,8 @@ type Store struct {
 // and possible conflicts with schema from existing database is checked
 // automatically.
 func New(path string) (s *Store, err error) {
-	db, err := shed.NewDB(path, "")
+	logger := logging.New(ioutil.Discard, 0)
+	db, err := shed.NewDB(path, logger)
 	if err != nil {
 		return nil, err
 	}
@@ -59,12 +61,12 @@ func New(path string) (s *Store, err error) {
 		db: db,
 	}
 	// Identify current storage schema by arbitrary name.
-	s.schemaName, err = db.NewStringField("schema-name")
+	s.schemaName, err = db.NewStringField("schema-name", logger)
 	if err != nil {
 		return nil, err
 	}
 	// Global ever incrementing index of chunk accesses.
-	s.accessCounter, err = db.NewUint64Field("access-counter")
+	s.accessCounter, err = db.NewUint64Field("access-counter", logger)
 	if err != nil {
 		return nil, err
 	}
@@ -88,7 +90,7 @@ func New(path string) (s *Store, err error) {
 			e.Data = value[8:]
 			return e, nil
 		},
-	})
+	}, logger)
 	if err != nil {
 		return nil, err
 	}
@@ -111,7 +113,7 @@ func New(path string) (s *Store, err error) {
 			e.AccessTimestamp = int64(binary.BigEndian.Uint64(value))
 			return e, nil
 		},
-	})
+	}, logger)
 	if err != nil {
 		return nil, err
 	}
@@ -136,7 +138,7 @@ func New(path string) (s *Store, err error) {
 		DecodeValue: func(keyItem shed.Item, value []byte) (e shed.Item, err error) {
 			return e, nil
 		},
-	})
+	}, logger)
 	if err != nil {
 		return nil, err
 	}
pkg/shed/field_string.go

@@ -17,26 +17,29 @@
 package shed

 import (
+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )

 // StringField is the most simple field implementation
 // that stores an arbitrary string under a specific LevelDB key.
 type StringField struct {
-	db  *DB
-	key []byte
+	db     *DB
+	key    []byte
+	logger logging.Logger
 }

 // NewStringField retruns a new Instance of StringField.
 // It validates its name and type against the database schema.
-func (db *DB) NewStringField(name string) (f StringField, err error) {
+func (db *DB) NewStringField(name string, logger logging.Logger) (f StringField, err error) {
 	key, err := db.schemaFieldKey(name, "string")
 	if err != nil {
 		return f, err
 	}
 	return StringField{
-		db:  db,
-		key: key,
+		db:     db,
+		key:    key,
+		logger: logger,
 	}, nil
 }
@@ -47,6 +50,7 @@ func (f StringField) Get() (val string, err error) {
 	b, err := f.db.Get(f.key)
 	if err != nil {
 		if err == leveldb.ErrNotFound {
+			f.logger.Errorf("key %s not found", string(f.key))
 			return "", nil
 		}
 		return "", err
pkg/shed/field_string_test.go

@@ -17,8 +17,10 @@
 package shed

 import (
+	"io/ioutil"
 	"testing"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )
@@ -28,7 +30,8 @@ func TestStringField(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	simpleString, err := db.NewStringField("simple-string")
+	logger := logging.New(ioutil.Discard, 0)
+	simpleString, err := db.NewStringField("simple-string", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
pkg/shed/field_struct.go

@@ -18,26 +18,30 @@ package shed
 import (
 	"encoding/json"
+
+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )

 // StructField is a helper to store complex structure by
 // encoding it in RLP format.
 type StructField struct {
-	db  *DB
-	key []byte
+	db     *DB
+	key    []byte
+	logger logging.Logger
 }

 // NewStructField returns a new StructField.
 // It validates its name and type against the database schema.
-func (db *DB) NewStructField(name string) (f StructField, err error) {
+func (db *DB) NewStructField(name string, logger logging.Logger) (f StructField, err error) {
 	key, err := db.schemaFieldKey(name, "struct-rlp")
 	if err != nil {
 		return f, err
 	}
 	return StructField{
-		db:  db,
-		key: key,
+		db:     db,
+		key:    key,
+		logger: logger,
 	}, nil
 }
@@ -46,6 +50,7 @@ func (db *DB) NewStructField(name string) (f StructField, err error) {
 func (f StructField) Get(val interface{}) (err error) {
 	b, err := f.db.Get(f.key)
 	if err != nil {
+		f.logger.Debugf("could not GET key %s", string(f.key))
 		return err
 	}
 	return json.Unmarshal(b, val)
@@ -55,6 +60,7 @@ func (f StructField) Get(val interface{}) (err error) {
 func (f StructField) Put(val interface{}) (err error) {
 	b, err := json.Marshal(val)
 	if err != nil {
+		f.logger.Debugf("could not PUT key %s", string(f.key))
 		return err
 	}
 	return f.db.Put(f.key, b)
@@ -64,6 +70,7 @@ func (f StructField) Put(val interface{}) (err error) {
 func (f StructField) PutInBatch(batch *leveldb.Batch, val interface{}) (err error) {
 	b, err := json.Marshal(val)
 	if err != nil {
+		f.logger.Debugf("could not PUT key %s in batch", string(f.key))
 		return err
 	}
 	batch.Put(f.key, b)
pkg/shed/field_struct_test.go

@@ -17,8 +17,10 @@
 package shed

 import (
+	"io/ioutil"
 	"testing"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )
@@ -27,8 +29,8 @@ import (
 func TestStructField(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	complexField, err := db.NewStructField("complex-field")
+	logger := logging.New(ioutil.Discard, 0)
+	complexField, err := db.NewStructField("complex-field", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
pkg/shed/field_uint64.go

@@ -19,26 +19,29 @@ package shed
 import (
 	"encoding/binary"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )

 // Uint64Field provides a way to have a simple counter in the database.
 // It transparently encodes uint64 type value to bytes.
 type Uint64Field struct {
-	db  *DB
-	key []byte
+	db     *DB
+	key    []byte
+	logger logging.Logger
 }

 // NewUint64Field returns a new Uint64Field.
 // It validates its name and type against the database schema.
-func (db *DB) NewUint64Field(name string) (f Uint64Field, err error) {
+func (db *DB) NewUint64Field(name string, logger logging.Logger) (f Uint64Field, err error) {
 	key, err := db.schemaFieldKey(name, "uint64")
 	if err != nil {
 		return f, err
 	}
 	return Uint64Field{
-		db:  db,
-		key: key,
+		db:     db,
+		key:    key,
+		logger: logger,
 	}, nil
 }
@@ -49,6 +52,7 @@ func (f Uint64Field) Get() (val uint64, err error) {
 	b, err := f.db.Get(f.key)
 	if err != nil {
 		if err == leveldb.ErrNotFound {
+			f.logger.Errorf("key %s not found", string(f.key))
 			return 0, nil
 		}
 		return 0, err
@@ -73,8 +77,10 @@ func (f Uint64Field) Inc() (val uint64, err error) {
 	val, err = f.Get()
 	if err != nil {
 		if err == leveldb.ErrNotFound {
+			f.logger.Debugf("key %s not found", string(f.key))
 			val = 0
 		} else {
+			f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
 			return 0, err
 		}
 	}
@@ -89,8 +95,10 @@ func (f Uint64Field) IncInBatch(batch *leveldb.Batch) (val uint64, err error) {
 	val, err = f.Get()
 	if err != nil {
 		if err == leveldb.ErrNotFound {
+			f.logger.Debugf("key %s not found", string(f.key))
 			val = 0
 		} else {
+			f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
 			return 0, err
 		}
 	}
@@ -106,8 +114,10 @@ func (f Uint64Field) Dec() (val uint64, err error) {
 	val, err = f.Get()
 	if err != nil {
 		if err == leveldb.ErrNotFound {
+			f.logger.Debugf("key %s not found", string(f.key))
 			val = 0
 		} else {
+			f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
 			return 0, err
 		}
 	}
@@ -125,8 +135,10 @@ func (f Uint64Field) DecInBatch(batch *leveldb.Batch) (val uint64, err error) {
 	val, err = f.Get()
 	if err != nil {
 		if err == leveldb.ErrNotFound {
+			f.logger.Debugf("key %s not found", string(f.key))
 			val = 0
 		} else {
+			f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
 			return 0, err
 		}
 	}
pkg/shed/field_uint64_test.go

@@ -17,8 +17,10 @@
 package shed

 import (
+	"io/ioutil"
 	"testing"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )
@@ -27,8 +29,8 @@ import (
 func TestUint64Field(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	counter, err := db.NewUint64Field("counter")
+	logger := logging.New(ioutil.Discard, 0)
+	counter, err := db.NewUint64Field("counter", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -114,8 +116,8 @@ func TestUint64Field(t *testing.T) {
 func TestUint64Field_Inc(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	counter, err := db.NewUint64Field("counter")
+	logger := logging.New(ioutil.Discard, 0)
+	counter, err := db.NewUint64Field("counter", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -144,8 +146,8 @@ func TestUint64Field_Inc(t *testing.T) {
 func TestUint64Field_IncInBatch(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	counter, err := db.NewUint64Field("counter")
+	logger := logging.New(ioutil.Discard, 0)
+	counter, err := db.NewUint64Field("counter", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -198,8 +200,8 @@ func TestUint64Field_IncInBatch(t *testing.T) {
 func TestUint64Field_Dec(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	counter, err := db.NewUint64Field("counter")
+	logger := logging.New(ioutil.Discard, 0)
+	counter, err := db.NewUint64Field("counter", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -235,8 +237,8 @@ func TestUint64Field_Dec(t *testing.T) {
 func TestUint64Field_DecInBatch(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	counter, err := db.NewUint64Field("counter")
+	logger := logging.New(ioutil.Discard, 0)
+	counter, err := db.NewUint64Field("counter", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
pkg/shed/index.go

@@ -19,6 +19,7 @@ package shed
 import (
 	"bytes"
+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 	"github.com/syndtr/goleveldb/leveldb/iterator"
 )
@@ -82,6 +83,7 @@ func (i Item) Merge(i2 Item) (new Item) {
 // It implements IndexIteratorInterface interface.
 type Index struct {
 	db            *DB
+	logger        logging.Logger
 	prefix        []byte
 	encodeKeyFunc func(fields Item) (key []byte, err error)
 	decodeKeyFunc func(key []byte) (e Item, err error)
@@ -101,7 +103,7 @@ type IndexFuncs struct {
 // NewIndex returns a new Index instance with defined name and
 // encoding functions. The name must be unique and will be validated
 // on database schema for a key prefix byte.
-func (db *DB) NewIndex(name string, funcs IndexFuncs) (f Index, err error) {
+func (db *DB) NewIndex(name string, funcs IndexFuncs, logger logging.Logger) (f Index, err error) {
 	id, err := db.schemaIndexPrefix(name)
 	if err != nil {
 		return f, err
@@ -109,6 +111,7 @@ func (db *DB) NewIndex(name string, funcs IndexFuncs) (f Index, err error) {
 	prefix := []byte{id}
 	return Index{
 		db:     db,
+		logger: logger,
 		prefix: prefix,
 		// This function adjusts Index LevelDB key
 		// by appending the provided index id byte.
@@ -138,14 +141,17 @@ func (db *DB) NewIndex(name string, funcs IndexFuncs) (f Index, err error) {
 func (f Index) Get(keyFields Item) (out Item, err error) {
 	key, err := f.encodeKeyFunc(keyFields)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in Get. Error: %s", err.Error())
 		return out, err
 	}
 	value, err := f.db.Get(key)
 	if err != nil {
+		f.logger.Debugf("error getting key %s in Get. Error: %s", string(key), err.Error())
 		return out, err
 	}
 	out, err = f.decodeValueFunc(keyFields, value)
 	if err != nil {
+		f.logger.Debugf("error decofing keyfields in Get. Error: %s", err.Error())
 		return out, err
 	}
 	return out.Merge(keyFields), nil
@@ -160,6 +166,7 @@ func (f Index) Get(keyFields Item) (out Item, err error) {
 func (f Index) Fill(items []Item) (err error) {
 	snapshot, err := f.db.ldb.GetSnapshot()
 	if err != nil {
+		f.logger.Debugf("error getting snapshot in Fill. Error: %s", err.Error())
 		return err
 	}
 	defer snapshot.Release()
@@ -167,14 +174,17 @@ func (f Index) Fill(items []Item) (err error) {
 	for i, item := range items {
 		key, err := f.encodeKeyFunc(item)
 		if err != nil {
+			f.logger.Debugf("keyfields encoding error in Fill. Error: %s", err.Error())
 			return err
 		}
 		value, err := snapshot.Get(key, nil)
 		if err != nil {
+			f.logger.Debugf("error getting key %s in Fill. Error: %s", string(key), err.Error())
 			return err
 		}
 		v, err := f.decodeValueFunc(item, value)
 		if err != nil {
+			f.logger.Debugf("error decofing keyfields in Fill . Error: %s", err.Error())
 			return err
 		}
 		items[i] = v.Merge(item)
@@ -188,6 +198,7 @@ func (f Index) Fill(items []Item) (err error) {
 func (f Index) Has(keyFields Item) (bool, error) {
 	key, err := f.encodeKeyFunc(keyFields)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in Has. Error: %s", err.Error())
 		return false, err
 	}
 	return f.db.Has(key)
@@ -199,16 +210,19 @@ func (f Index) HasMulti(items ...Item) ([]bool, error) {
 	have := make([]bool, len(items))
 	snapshot, err := f.db.ldb.GetSnapshot()
 	if err != nil {
+		f.logger.Debugf("error getting snapshot in HasMulti. Error: %s", err.Error())
 		return nil, err
 	}
 	defer snapshot.Release()
 	for i, keyFields := range items {
 		key, err := f.encodeKeyFunc(keyFields)
 		if err != nil {
+			f.logger.Debugf("keyfields encoding error in HasMulti. Error: %s", err.Error())
 			return nil, err
 		}
 		have[i], err = snapshot.Has(key, nil)
 		if err != nil {
+			f.logger.Debugf("snaoshot Has error in HasMulti. Error: %s", err.Error())
 			return nil, err
 		}
 	}
@@ -220,10 +234,12 @@ func (f Index) HasMulti(items ...Item) ([]bool, error) {
 func (f Index) Put(i Item) (err error) {
 	key, err := f.encodeKeyFunc(i)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in Put. Error: %s", err.Error())
 		return err
 	}
 	value, err := f.encodeValueFunc(i)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in Put. Error: %s", err.Error())
 		return err
 	}
 	return f.db.Put(key, value)
@@ -235,10 +251,12 @@ func (f Index) Put(i Item) (err error) {
 func (f Index) PutInBatch(batch *leveldb.Batch, i Item) (err error) {
 	key, err := f.encodeKeyFunc(i)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in PutInBatch. Error: %s", err.Error())
 		return err
 	}
 	value, err := f.encodeValueFunc(i)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in PutInBatch. Error: %s", err.Error())
 		return err
 	}
 	batch.Put(key, value)
@@ -250,6 +268,7 @@ func (f Index) PutInBatch(batch *leveldb.Batch, i Item) (err error) {
 func (f Index) Delete(keyFields Item) (err error) {
 	key, err := f.encodeKeyFunc(keyFields)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in Delete. Error: %s", err.Error())
 		return err
 	}
 	return f.db.Delete(key)
@@ -260,6 +279,7 @@ func (f Index) Delete(keyFields Item) (err error) {
 func (f Index) DeleteInBatch(batch *leveldb.Batch, keyFields Item) (err error) {
 	key, err := f.encodeKeyFunc(keyFields)
 	if err != nil {
+		f.logger.Debugf("keyfields encoding error in DeleteInBatch. Error: %s", err.Error())
 		return err
 	}
 	batch.Delete(key)
@@ -298,6 +318,7 @@ func (f Index) Iterate(fn IndexIterFunc, options *IterateOptions) (err error) {
 		// start from the provided StartFrom Item key value
 		startKey, err = f.encodeKeyFunc(*options.StartFrom)
 		if err != nil {
+			f.logger.Debugf("keyfields encoding error in Iterate. Error: %s", err.Error())
 			return err
 		}
 	}
@@ -307,6 +328,7 @@ func (f Index) Iterate(fn IndexIterFunc, options *IterateOptions) (err error) {
 	// move the cursor to the start key
 	ok := it.Seek(startKey)
 	if !ok {
+		f.logger.Debugf("seek error in Iterate. Error: %s", it.Error())
 		// stop iterator if seek has failed
 		return it.Error()
 	}
@@ -325,6 +347,7 @@ func (f Index) Iterate(fn IndexIterFunc, options *IterateOptions) (err error) {
 		}
 		stop, err := fn(item)
 		if err != nil {
+			f.logger.Debugf("error executing callback function in Iterate. Error: %s", err.Error())
 			return err
 		}
 		if stop {
@@ -359,11 +382,13 @@ func (f Index) itemFromIterator(it iterator.Iterator, totalPrefix []byte) (i Ite
 	// create a copy of key byte slice not to share leveldb underlaying slice array
 	keyItem, err := f.decodeKeyFunc(append([]byte(nil), key...))
 	if err != nil {
+		f.logger.Debugf("error decoding key in itemFromIterator. Error: %s", err.Error())
 		return i, err
 	}
 	// create a copy of value byte slice not to share leveldb underlaying slice array
 	valueItem, err := f.decodeValueFunc(keyItem, append([]byte(nil), it.Value()...))
 	if err != nil {
+		f.logger.Debugf("error decoding value in itemFromIterator. Error: %s", err.Error())
 		return i, err
 	}
 	return keyItem.Merge(valueItem), it.Error()
@@ -435,6 +460,7 @@ func (f Index) Count() (count int, err error) {
 func (f Index) CountFrom(start Item) (count int, err error) {
 	startKey, err := f.encodeKeyFunc(start)
 	if err != nil {
+		f.logger.Debugf("error encoding item in CountFrom. Error: %s", err.Error())
 		return 0, err
 	}
 	it := f.db.NewIterator()
pkg/shed/index_test.go

@@ -20,10 +20,12 @@ import (
 	"bytes"
 	"encoding/binary"
 	"fmt"
+	"io/ioutil"
 	"sort"
 	"testing"
 	"time"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )
@@ -53,8 +55,8 @@ var retrievalIndexFuncs = IndexFuncs{
 func TestIndex(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
+	logger := logging.New(ioutil.Discard, 0)
+	index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -367,8 +369,8 @@ func TestIndex(t *testing.T) {
 func TestIndex_Iterate(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
+	logger := logging.New(ioutil.Discard, 0)
+	index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -502,7 +504,8 @@ func TestIndex_Iterate(t *testing.T) {
 	})
 	t.Run("no overflow", func(t *testing.T) {
-		secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs)
+		logger := logging.New(ioutil.Discard, 0)
+		secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs, logger)
 		if err != nil {
 			t.Fatal(err)
 		}
@@ -550,8 +553,8 @@ func TestIndex_Iterate(t *testing.T) {
 func TestIndex_Iterate_withPrefix(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
+	logger := logging.New(ioutil.Discard, 0)
+	index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -697,7 +700,8 @@ func TestIndex_Iterate_withPrefix(t *testing.T) {
 	})
 	t.Run("no overflow", func(t *testing.T) {
-		secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs)
+		logger := logging.New(ioutil.Discard, 0)
+		secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs, logger)
 		if err != nil {
 			t.Fatal(err)
 		}
@@ -737,8 +741,8 @@ func TestIndex_Iterate_withPrefix(t *testing.T) {
 func TestIndex_count(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
+	logger := logging.New(ioutil.Discard, 0)
+	index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -907,8 +911,8 @@ func checkItem(t *testing.T, got, want Item) {
 func TestIndex_firstAndLast(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
+	logger := logging.New(ioutil.Discard, 0)
+	index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -1056,8 +1060,8 @@ func TestIncByteSlice(t *testing.T) {
 func TestIndex_HasMulti(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
+	logger := logging.New(ioutil.Discard, 0)
+	index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
 	if err != nil {
 		t.Fatal(err)
 	}
pkg/shed/vector_uint64.go

@@ -19,26 +19,29 @@ package shed
 import (
 	"encoding/binary"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )

 // Uint64Vector provides a way to have multiple counters in the database.
 // It transparently encodes uint64 type value to bytes.
 type Uint64Vector struct {
-	db  *DB
-	key []byte
+	db     *DB
+	key    []byte
+	logger logging.Logger
 }

 // NewUint64Vector returns a new Uint64Vector.
 // It validates its name and type against the database schema.
-func (db *DB) NewUint64Vector(name string) (f Uint64Vector, err error) {
+func (db *DB) NewUint64Vector(name string, logger logging.Logger) (f Uint64Vector, err error) {
 	key, err := db.schemaFieldKey(name, "vector-uint64")
 	if err != nil {
 		return f, err
 	}
 	return Uint64Vector{
-		db:  db,
-		key: key,
+		db:     db,
+		key:    key,
+		logger: logger,
 	}, nil
 }
@@ -75,6 +78,7 @@ func (f Uint64Vector) Inc(i uint64) (val uint64, err error) {
 		if err == leveldb.ErrNotFound {
 			val = 0
 		} else {
+			f.logger.Debugf("error getiing value while doing Inc. Error: %s", err.Error())
 			return 0, err
 		}
 	}
@@ -91,6 +95,7 @@ func (f Uint64Vector) IncInBatch(batch *leveldb.Batch, i uint64) (val uint64, er
 		if err == leveldb.ErrNotFound {
 			val = 0
 		} else {
+			f.logger.Debugf("error getiing value while doing IncInBatch. Error: %s", err.Error())
 			return 0, err
 		}
 	}
@@ -108,6 +113,7 @@ func (f Uint64Vector) Dec(i uint64) (val uint64, err error) {
 		if err == leveldb.ErrNotFound {
 			val = 0
 		} else {
+			f.logger.Debugf("error getiing value while doing Dec. Error: %s", err.Error())
 			return 0, err
 		}
 	}
@@ -127,6 +133,7 @@ func (f Uint64Vector) DecInBatch(batch *leveldb.Batch, i uint64) (val uint64, er
 		if err == leveldb.ErrNotFound {
 			val = 0
 		} else {
+			f.logger.Debugf("error getiing value while doing DecInBatch. Error: %s", err.Error())
 			return 0, err
 		}
 	}
pkg/shed/vector_uint64_test.go

@@ -17,8 +17,10 @@
 package shed

 import (
+	"io/ioutil"
 	"testing"

+	"github.com/ethersphere/bee/pkg/logging"
 	"github.com/syndtr/goleveldb/leveldb"
 )
@@ -27,8 +29,8 @@ import (
 func TestUint64Vector(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	bins, err := db.NewUint64Vector("bins")
+	logger := logging.New(ioutil.Discard, 0)
+	bins, err := db.NewUint64Vector("bins", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -118,8 +120,8 @@ func TestUint64Vector(t *testing.T) {
 func TestUint64Vector_Inc(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	bins, err := db.NewUint64Vector("bins")
+	logger := logging.New(ioutil.Discard, 0)
+	bins, err := db.NewUint64Vector("bins", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -150,8 +152,8 @@ func TestUint64Vector_Inc(t *testing.T) {
 func TestUint64Vector_IncInBatch(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	bins, err := db.NewUint64Vector("bins")
+	logger := logging.New(ioutil.Discard, 0)
+	bins, err := db.NewUint64Vector("bins", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -206,8 +208,8 @@ func TestUint64Vector_IncInBatch(t *testing.T) {
 func TestUint64Vector_Dec(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	bins, err := db.NewUint64Vector("bins")
+	logger := logging.New(ioutil.Discard, 0)
+	bins, err := db.NewUint64Vector("bins", logger)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -245,8 +247,8 @@ func TestUint64Vector_Dec(t *testing.T) {
 func TestUint64Vector_DecInBatch(t *testing.T) {
 	db, cleanupFunc := newTestDB(t)
 	defer cleanupFunc()
-	bins, err := db.NewUint64Vector("bins")
+	logger := logging.New(ioutil.Discard, 0)
+	bins, err := db.NewUint64Vector("bins", logger)
 	if err != nil {
 		t.Fatal(err)
 	}