Commit dfa4c0dc authored by Zahoor Mohamed, committed by GitHub

P4 - Replace log from ethereum to bee's own log library (#74)

parent 852fbd93
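For orientation, here is a minimal sketch of how the changed constructor is called after this patch. It mirrors the test setup shown in the diff below; the temporary-directory handling and the standalone main wrapper are illustrative only, and db.Close is assumed from the shed package (it is not part of this diff).

package main

import (
	"io/ioutil"
	"log"
	"os"

	"github.com/ethersphere/bee/pkg/logging"
	"github.com/ethersphere/bee/pkg/shed"
)

func main() {
	dir, err := ioutil.TempDir("", "shed-example")
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// NewDB now takes a logging.Logger instead of a metrics prefix string.
	logger := logging.New(ioutil.Discard, 0)
	db, err := shed.NewDB(dir, logger)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close() // assumed to exist on shed.DB; not shown in this diff

	// Failed operations are now reported through the injected logger
	// in addition to the existing metrics counters.
	if err := db.Put([]byte("key"), []byte("value")); err != nil {
		log.Fatal(err)
	}
	value, err := db.Get([]byte("key"))
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("got %s", value)
}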
......@@ -23,6 +23,7 @@
package shed
import (
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
"github.com/syndtr/goleveldb/leveldb/iterator"
"github.com/syndtr/goleveldb/leveldb/opt"
......@@ -39,13 +40,14 @@ const (
type DB struct {
ldb *leveldb.DB
metrics metrics
logger logging.Logger
quit chan struct{} // Quit channel to stop the metrics collection before closing the database
}
// NewDB constructs a new DB and validates the schema
// if it exists in database on the given path.
// metricsPrefix is used for metrics collection for the given DB.
func NewDB(path string, metricsPrefix string) (db *DB, err error) {
func NewDB(path string, logger logging.Logger) (db *DB, err error) {
ldb, err := leveldb.OpenFile(path, &opt.Options{
OpenFilesCacheCapacity: openFileLimit,
})
......@@ -55,6 +57,7 @@ func NewDB(path string, metricsPrefix string) (db *DB, err error) {
db = &DB{
ldb: ldb,
metrics: newMetrics(),
logger: logger,
}
if _, err = db.getSchema(); err != nil {
......@@ -81,6 +84,7 @@ func NewDB(path string, metricsPrefix string) (db *DB, err error) {
func (db *DB) Put(key []byte, value []byte) (err error) {
err = db.ldb.Put(key, value, nil)
if err != nil {
db.logger.Debugf("failed to insert in to DB. Error : %s", err.Error())
db.metrics.PutFailCounter.Inc()
return err
}
......@@ -92,9 +96,11 @@ func (db *DB) Put(key []byte, value []byte) (err error) {
func (db *DB) Get(key []byte) (value []byte, err error) {
value, err = db.ldb.Get(key, nil)
if err != nil {
if err == leveldb.ErrNotFound {
db.logger.Debugf("key %s not found during GET", string(key))
db.metrics.GetNotFoundCounter.Inc()
} else {
db.logger.Errorf("error getting key %s from DB. Error: %s", string(key), err.Error())
db.metrics.GetFailCounter.Inc()
}
return nil, err
}
db.metrics.GetCounter.Inc()
......@@ -105,6 +111,7 @@ func (db *DB) Get(key []byte) (value []byte, err error) {
func (db *DB) Has(key []byte) (yes bool, err error) {
yes, err = db.ldb.Has(key, nil)
if err != nil {
db.logger.Debugf("encountered error during HAS of key %s. Error: %s ", string(key), err.Error())
db.metrics.HasFailCounter.Inc()
return false, err
}
......@@ -116,6 +123,7 @@ func (db *DB) Has(key []byte) (yes bool, err error) {
func (db *DB) Delete(key []byte) (err error) {
err = db.ldb.Delete(key, nil)
if err != nil {
db.logger.Debugf("could not DELETE key %s. Error: %s ", string(key), err.Error())
db.metrics.DeleteFailCounter.Inc()
return err
}
......@@ -133,6 +141,7 @@ func (db *DB) NewIterator() iterator.Iterator {
func (db *DB) WriteBatch(batch *leveldb.Batch) (err error) {
err = db.ldb.Write(batch, nil)
if err != nil {
db.logger.Debugf("could not writing batch. Error: %s ", err.Error())
db.metrics.WriteBatchFailCounter.Inc()
return err
}
......
......@@ -20,6 +20,8 @@ import (
"io/ioutil"
"os"
"testing"
"github.com/ethersphere/bee/pkg/logging"
)
// TestNewDB constructs a new DB
......@@ -54,12 +56,13 @@ func TestDB_persistence(t *testing.T) {
t.Fatal(err)
}
defer os.RemoveAll(dir)
logger := logging.New(ioutil.Discard, 0)
db, err := NewDB(dir, "")
db, err := NewDB(dir, logger)
if err != nil {
t.Fatal(err)
}
stringField, err := db.NewStringField("preserve-me")
stringField, err := db.NewStringField("preserve-me", logger)
if err != nil {
t.Fatal(err)
}
......@@ -73,11 +76,11 @@ func TestDB_persistence(t *testing.T) {
t.Fatal(err)
}
db2, err := NewDB(dir, "")
db2, err := NewDB(dir, logger)
if err != nil {
t.Fatal(err)
}
stringField2, err := db2.NewStringField("preserve-me")
stringField2, err := db2.NewStringField("preserve-me", logger)
if err != nil {
t.Fatal(err)
}
......@@ -100,7 +103,8 @@ func newTestDB(t *testing.T) (db *DB, cleanupFunc func()) {
if err != nil {
t.Fatal(err)
}
db, err = NewDB(dir, "")
logger := logging.New(ioutil.Discard, 0)
db, err = NewDB(dir, logger)
if err != nil {
os.RemoveAll(dir)
t.Fatal(err)
......
......@@ -26,7 +26,8 @@ import (
"os"
"time"
"github.com/ethersphere/swarm/shed"
"github.com/ethersphere/bee/pkg/logging"
"github.com/ethersphere/bee/pkg/shed"
"github.com/ethersphere/swarm/storage"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -51,7 +52,8 @@ type Store struct {
// and possible conflicts with schema from existing database is checked
// automatically.
func New(path string) (s *Store, err error) {
db, err := shed.NewDB(path, "")
logger := logging.New(ioutil.Discard, 0)
db, err := shed.NewDB(path, logger)
if err != nil {
return nil, err
}
......@@ -59,12 +61,12 @@ func New(path string) (s *Store, err error) {
db: db,
}
// Identify current storage schema by arbitrary name.
s.schemaName, err = db.NewStringField("schema-name")
s.schemaName, err = db.NewStringField("schema-name", logger)
if err != nil {
return nil, err
}
// Global ever incrementing index of chunk accesses.
s.accessCounter, err = db.NewUint64Field("access-counter")
s.accessCounter, err = db.NewUint64Field("access-counter", logger)
if err != nil {
return nil, err
}
......@@ -88,7 +90,7 @@ func New(path string) (s *Store, err error) {
e.Data = value[8:]
return e, nil
},
})
}, logger)
if err != nil {
return nil, err
}
......@@ -111,7 +113,7 @@ func New(path string) (s *Store, err error) {
e.AccessTimestamp = int64(binary.BigEndian.Uint64(value))
return e, nil
},
})
}, logger)
if err != nil {
return nil, err
}
......@@ -136,7 +138,7 @@ func New(path string) (s *Store, err error) {
DecodeValue: func(keyItem shed.Item, value []byte) (e shed.Item, err error) {
return e, nil
},
})
}, logger)
if err != nil {
return nil, err
}
......
......@@ -17,6 +17,7 @@
package shed
import (
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -25,11 +26,12 @@ import (
type StringField struct {
db *DB
key []byte
logger logging.Logger
}
// NewStringField returns a new instance of StringField.
// It validates its name and type against the database schema.
func (db *DB) NewStringField(name string) (f StringField, err error) {
func (db *DB) NewStringField(name string, logger logging.Logger) (f StringField, err error) {
key, err := db.schemaFieldKey(name, "string")
if err != nil {
return f, err
......@@ -37,6 +39,7 @@ func (db *DB) NewStringField(name string) (f StringField, err error) {
return StringField{
db: db,
key: key,
logger: logger,
}, nil
}
......@@ -47,6 +50,7 @@ func (f StringField) Get() (val string, err error) {
b, err := f.db.Get(f.key)
if err != nil {
if err == leveldb.ErrNotFound {
f.logger.Errorf("key %s not found", string(f.key))
return "", nil
}
return "", err
......
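As a rough usage sketch for the updated StringField constructor: it assumes a *shed.DB and logging.Logger created as in the first sketch above, and that StringField also exposes a Put method as in the upstream shed package (only Get appears in this diff).

// Assumes: import ("github.com/ethersphere/bee/pkg/logging"; "github.com/ethersphere/bee/pkg/shed")
func useStringField(db *shed.DB, logger logging.Logger) (string, error) {
	// The logger is now passed to every field constructor.
	schemaName, err := db.NewStringField("schema-name", logger)
	if err != nil {
		return "", err
	}
	// Put is assumed from the upstream shed package; only Get is shown in this diff.
	if err := schemaName.Put("v1"); err != nil {
		return "", err
	}
	// Get returns "" and a nil error when the field has never been written.
	return schemaName.Get()
}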
......@@ -17,8 +17,10 @@
package shed
import (
"io/ioutil"
"testing"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -28,7 +30,8 @@ func TestStringField(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
simpleString, err := db.NewStringField("simple-string")
logger := logging.New(ioutil.Discard, 0)
simpleString, err := db.NewStringField("simple-string", logger)
if err != nil {
t.Fatal(err)
}
......
......@@ -18,6 +18,8 @@ package shed
import (
"encoding/json"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -26,11 +28,12 @@ import (
type StructField struct {
db *DB
key []byte
logger logging.Logger
}
// NewStructField returns a new StructField.
// It validates its name and type against the database schema.
func (db *DB) NewStructField(name string) (f StructField, err error) {
func (db *DB) NewStructField(name string, logger logging.Logger) (f StructField, err error) {
key, err := db.schemaFieldKey(name, "struct-rlp")
if err != nil {
return f, err
......@@ -38,6 +41,7 @@ func (db *DB) NewStructField(name string) (f StructField, err error) {
return StructField{
db: db,
key: key,
logger: logger,
}, nil
}
......@@ -46,6 +50,7 @@ func (db *DB) NewStructField(name string) (f StructField, err error) {
func (f StructField) Get(val interface{}) (err error) {
b, err := f.db.Get(f.key)
if err != nil {
f.logger.Debugf("could not GET key %s", string(f.key))
return err
}
return json.Unmarshal(b, val)
......@@ -55,6 +60,7 @@ func (f StructField) Get(val interface{}) (err error) {
func (f StructField) Put(val interface{}) (err error) {
b, err := json.Marshal(val)
if err != nil {
f.logger.Debugf("could not PUT key %s", string(f.key))
return err
}
return f.db.Put(f.key, b)
......@@ -64,6 +70,7 @@ func (f StructField) Put(val interface{}) (err error) {
func (f StructField) PutInBatch(batch *leveldb.Batch, val interface{}) (err error) {
b, err := json.Marshal(val)
if err != nil {
f.logger.Debugf("could not PUT key %s in batch", string(f.key))
return err
}
batch.Put(f.key, b)
......
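A similar hedged sketch for StructField, whose Get/Put pair JSON-encodes the value as shown above (db and logger are assumed to exist as in the first sketch):

type schemaInfo struct {
	Version int    `json:"version"`
	Name    string `json:"name"`
}

func useStructField(db *shed.DB, logger logging.Logger) (schemaInfo, error) {
	var info schemaInfo
	field, err := db.NewStructField("schema-info", logger)
	if err != nil {
		return info, err
	}
	// Values are marshalled to JSON on Put and unmarshalled on Get,
	// with failures now reported through the injected logger.
	if err := field.Put(schemaInfo{Version: 1, Name: "example"}); err != nil {
		return info, err
	}
	err = field.Get(&info)
	return info, err
}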
......@@ -17,8 +17,10 @@
package shed
import (
"io/ioutil"
"testing"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -27,8 +29,8 @@ import (
func TestStructField(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
complexField, err := db.NewStructField("complex-field")
logger := logging.New(ioutil.Discard, 0)
complexField, err := db.NewStructField("complex-field", logger)
if err != nil {
t.Fatal(err)
}
......
......@@ -19,6 +19,7 @@ package shed
import (
"encoding/binary"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -27,11 +28,12 @@ import (
type Uint64Field struct {
db *DB
key []byte
logger logging.Logger
}
// NewUint64Field returns a new Uint64Field.
// It validates its name and type against the database schema.
func (db *DB) NewUint64Field(name string) (f Uint64Field, err error) {
func (db *DB) NewUint64Field(name string, logger logging.Logger) (f Uint64Field, err error) {
key, err := db.schemaFieldKey(name, "uint64")
if err != nil {
return f, err
......@@ -39,6 +41,7 @@ func (db *DB) NewUint64Field(name string) (f Uint64Field, err error) {
return Uint64Field{
db: db,
key: key,
logger: logger,
}, nil
}
......@@ -49,6 +52,7 @@ func (f Uint64Field) Get() (val uint64, err error) {
b, err := f.db.Get(f.key)
if err != nil {
if err == leveldb.ErrNotFound {
f.logger.Errorf("key %s not found", string(f.key))
return 0, nil
}
return 0, err
......@@ -73,8 +77,10 @@ func (f Uint64Field) Inc() (val uint64, err error) {
val, err = f.Get()
if err != nil {
if err == leveldb.ErrNotFound {
f.logger.Debugf("key %s not found", string(f.key))
val = 0
} else {
f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
return 0, err
}
}
......@@ -89,8 +95,10 @@ func (f Uint64Field) IncInBatch(batch *leveldb.Batch) (val uint64, err error) {
val, err = f.Get()
if err != nil {
if err == leveldb.ErrNotFound {
f.logger.Debugf("key %s not found", string(f.key))
val = 0
} else {
f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
return 0, err
}
}
......@@ -106,8 +114,10 @@ func (f Uint64Field) Dec() (val uint64, err error) {
val, err = f.Get()
if err != nil {
if err == leveldb.ErrNotFound {
f.logger.Debugf("key %s not found", string(f.key))
val = 0
} else {
f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
return 0, err
}
}
......@@ -125,8 +135,10 @@ func (f Uint64Field) DecInBatch(batch *leveldb.Batch) (val uint64, err error) {
val, err = f.Get()
if err != nil {
if err == leveldb.ErrNotFound {
f.logger.Debugf("key %s not found", string(f.key))
val = 0
} else {
f.logger.Errorf("key %s not found. Error: %s", string(f.key), err.Error())
return 0, err
}
}
......
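And a short sketch for the uint64 counter field, again assuming db and logger from the first sketch:

func useCounter(db *shed.DB, logger logging.Logger) (uint64, error) {
	counter, err := db.NewUint64Field("access-counter", logger)
	if err != nil {
		return 0, err
	}
	// Inc treats a missing key as 0 (the leveldb.ErrNotFound branch above),
	// so the first call returns 1.
	if _, err := counter.Inc(); err != nil {
		return 0, err
	}
	return counter.Get()
}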
......@@ -17,8 +17,10 @@
package shed
import (
"io/ioutil"
"testing"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -27,8 +29,8 @@ import (
func TestUint64Field(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
counter, err := db.NewUint64Field("counter")
logger := logging.New(ioutil.Discard, 0)
counter, err := db.NewUint64Field("counter", logger)
if err != nil {
t.Fatal(err)
}
......@@ -114,8 +116,8 @@ func TestUint64Field(t *testing.T) {
func TestUint64Field_Inc(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
counter, err := db.NewUint64Field("counter")
logger := logging.New(ioutil.Discard, 0)
counter, err := db.NewUint64Field("counter", logger)
if err != nil {
t.Fatal(err)
}
......@@ -144,8 +146,8 @@ func TestUint64Field_Inc(t *testing.T) {
func TestUint64Field_IncInBatch(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
counter, err := db.NewUint64Field("counter")
logger := logging.New(ioutil.Discard, 0)
counter, err := db.NewUint64Field("counter", logger)
if err != nil {
t.Fatal(err)
}
......@@ -198,8 +200,8 @@ func TestUint64Field_IncInBatch(t *testing.T) {
func TestUint64Field_Dec(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
counter, err := db.NewUint64Field("counter")
logger := logging.New(ioutil.Discard, 0)
counter, err := db.NewUint64Field("counter", logger)
if err != nil {
t.Fatal(err)
}
......@@ -235,8 +237,8 @@ func TestUint64Field_Dec(t *testing.T) {
func TestUint64Field_DecInBatch(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
counter, err := db.NewUint64Field("counter")
logger := logging.New(ioutil.Discard, 0)
counter, err := db.NewUint64Field("counter", logger)
if err != nil {
t.Fatal(err)
}
......
......@@ -19,6 +19,7 @@ package shed
import (
"bytes"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
"github.com/syndtr/goleveldb/leveldb/iterator"
)
......@@ -82,6 +83,7 @@ func (i Item) Merge(i2 Item) (new Item) {
// It implements IndexIteratorInterface interface.
type Index struct {
db *DB
logger logging.Logger
prefix []byte
encodeKeyFunc func(fields Item) (key []byte, err error)
decodeKeyFunc func(key []byte) (e Item, err error)
......@@ -101,7 +103,7 @@ type IndexFuncs struct {
// NewIndex returns a new Index instance with defined name and
// encoding functions. The name must be unique and will be validated
// on database schema for a key prefix byte.
func (db *DB) NewIndex(name string, funcs IndexFuncs) (f Index, err error) {
func (db *DB) NewIndex(name string, funcs IndexFuncs, logger logging.Logger) (f Index, err error) {
id, err := db.schemaIndexPrefix(name)
if err != nil {
return f, err
......@@ -109,6 +111,7 @@ func (db *DB) NewIndex(name string, funcs IndexFuncs) (f Index, err error) {
prefix := []byte{id}
return Index{
db: db,
logger: logger,
prefix: prefix,
// This function adjusts Index LevelDB key
// by appending the provided index id byte.
......@@ -138,14 +141,17 @@ func (db *DB) NewIndex(name string, funcs IndexFuncs) (f Index, err error) {
func (f Index) Get(keyFields Item) (out Item, err error) {
key, err := f.encodeKeyFunc(keyFields)
if err != nil {
f.logger.Debugf("keyfields encoding error in Get. Error: %s", err.Error())
return out, err
}
value, err := f.db.Get(key)
if err != nil {
f.logger.Debugf("error getting key %s in Get. Error: %s", string(key), err.Error())
return out, err
}
out, err = f.decodeValueFunc(keyFields, value)
if err != nil {
f.logger.Debugf("error decofing keyfields in Get. Error: %s", err.Error())
return out, err
}
return out.Merge(keyFields), nil
......@@ -160,6 +166,7 @@ func (f Index) Get(keyFields Item) (out Item, err error) {
func (f Index) Fill(items []Item) (err error) {
snapshot, err := f.db.ldb.GetSnapshot()
if err != nil {
f.logger.Debugf("error getting snapshot in Fill. Error: %s", err.Error())
return err
}
defer snapshot.Release()
......@@ -167,14 +174,17 @@ func (f Index) Fill(items []Item) (err error) {
for i, item := range items {
key, err := f.encodeKeyFunc(item)
if err != nil {
f.logger.Debugf("keyfields encoding error in Fill. Error: %s", err.Error())
return err
}
value, err := snapshot.Get(key, nil)
if err != nil {
f.logger.Debugf("error getting key %s in Fill. Error: %s", string(key), err.Error())
return err
}
v, err := f.decodeValueFunc(item, value)
if err != nil {
f.logger.Debugf("error decofing keyfields in Fill . Error: %s", err.Error())
return err
}
items[i] = v.Merge(item)
......@@ -188,6 +198,7 @@ func (f Index) Fill(items []Item) (err error) {
func (f Index) Has(keyFields Item) (bool, error) {
key, err := f.encodeKeyFunc(keyFields)
if err != nil {
f.logger.Debugf("keyfields encoding error in Has. Error: %s", err.Error())
return false, err
}
return f.db.Has(key)
......@@ -199,16 +210,19 @@ func (f Index) HasMulti(items ...Item) ([]bool, error) {
have := make([]bool, len(items))
snapshot, err := f.db.ldb.GetSnapshot()
if err != nil {
f.logger.Debugf("error getting snapshot in HasMulti. Error: %s", err.Error())
return nil, err
}
defer snapshot.Release()
for i, keyFields := range items {
key, err := f.encodeKeyFunc(keyFields)
if err != nil {
f.logger.Debugf("keyfields encoding error in HasMulti. Error: %s", err.Error())
return nil, err
}
have[i], err = snapshot.Has(key, nil)
if err != nil {
f.logger.Debugf("snaoshot Has error in HasMulti. Error: %s", err.Error())
return nil, err
}
}
......@@ -220,10 +234,12 @@ func (f Index) HasMulti(items ...Item) ([]bool, error) {
func (f Index) Put(i Item) (err error) {
key, err := f.encodeKeyFunc(i)
if err != nil {
f.logger.Debugf("keyfields encoding error in Put. Error: %s", err.Error())
return err
}
value, err := f.encodeValueFunc(i)
if err != nil {
f.logger.Debugf("keyfields encoding error in Put. Error: %s", err.Error())
return err
}
return f.db.Put(key, value)
......@@ -235,10 +251,12 @@ func (f Index) Put(i Item) (err error) {
func (f Index) PutInBatch(batch *leveldb.Batch, i Item) (err error) {
key, err := f.encodeKeyFunc(i)
if err != nil {
f.logger.Debugf("keyfields encoding error in PutInBatch. Error: %s", err.Error())
return err
}
value, err := f.encodeValueFunc(i)
if err != nil {
f.logger.Debugf("keyfields encoding error in PutInBatch. Error: %s", err.Error())
return err
}
batch.Put(key, value)
......@@ -250,6 +268,7 @@ func (f Index) PutInBatch(batch *leveldb.Batch, i Item) (err error) {
func (f Index) Delete(keyFields Item) (err error) {
key, err := f.encodeKeyFunc(keyFields)
if err != nil {
f.logger.Debugf("keyfields encoding error in Delete. Error: %s", err.Error())
return err
}
return f.db.Delete(key)
......@@ -260,6 +279,7 @@ func (f Index) Delete(keyFields Item) (err error) {
func (f Index) DeleteInBatch(batch *leveldb.Batch, keyFields Item) (err error) {
key, err := f.encodeKeyFunc(keyFields)
if err != nil {
f.logger.Debugf("keyfields encoding error in DeleteInBatch. Error: %s", err.Error())
return err
}
batch.Delete(key)
......@@ -298,6 +318,7 @@ func (f Index) Iterate(fn IndexIterFunc, options *IterateOptions) (err error) {
// start from the provided StartFrom Item key value
startKey, err = f.encodeKeyFunc(*options.StartFrom)
if err != nil {
f.logger.Debugf("keyfields encoding error in Iterate. Error: %s", err.Error())
return err
}
}
......@@ -307,6 +328,7 @@ func (f Index) Iterate(fn IndexIterFunc, options *IterateOptions) (err error) {
// move the cursor to the start key
ok := it.Seek(startKey)
if !ok {
f.logger.Debugf("seek error in Iterate. Error: %s", it.Error())
// stop iterator if seek has failed
return it.Error()
}
......@@ -325,6 +347,7 @@ func (f Index) Iterate(fn IndexIterFunc, options *IterateOptions) (err error) {
}
stop, err := fn(item)
if err != nil {
f.logger.Debugf("error executing callback function in Iterate. Error: %s", err.Error())
return err
}
if stop {
......@@ -359,11 +382,13 @@ func (f Index) itemFromIterator(it iterator.Iterator, totalPrefix []byte) (i Ite
// create a copy of key byte slice not to share leveldb underlying slice array
keyItem, err := f.decodeKeyFunc(append([]byte(nil), key...))
if err != nil {
f.logger.Debugf("error decoding key in itemFromIterator. Error: %s", err.Error())
return i, err
}
// create a copy of value byte slice not to share leveldb underlying slice array
valueItem, err := f.decodeValueFunc(keyItem, append([]byte(nil), it.Value()...))
if err != nil {
f.logger.Debugf("error decoding value in itemFromIterator. Error: %s", err.Error())
return i, err
}
return keyItem.Merge(valueItem), it.Error()
......@@ -435,6 +460,7 @@ func (f Index) Count() (count int, err error) {
func (f Index) CountFrom(start Item) (count int, err error) {
startKey, err := f.encodeKeyFunc(start)
if err != nil {
f.logger.Debugf("error encoding item in CountFrom. Error: %s", err.Error())
return 0, err
}
it := f.db.NewIterator()
......
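For the index type, a hedged sketch of the new NewIndex signature with the trailing logger argument. The IndexFuncs field names follow the upstream shed package (only DecodeValue is visible in this diff's context lines), and shed.Item's Address field is assumed from upstream as well; db and logger are assumed from the first sketch.

func useIndex(db *shed.DB, logger logging.Logger) (shed.Item, error) {
	index, err := db.NewIndex("retrieval", shed.IndexFuncs{
		EncodeKey: func(fields shed.Item) ([]byte, error) {
			return fields.Address, nil
		},
		DecodeKey: func(key []byte) (e shed.Item, err error) {
			e.Address = key
			return e, nil
		},
		EncodeValue: func(fields shed.Item) ([]byte, error) {
			return fields.Data, nil
		},
		DecodeValue: func(keyItem shed.Item, value []byte) (e shed.Item, err error) {
			e.Data = value
			return e, nil
		},
	}, logger)
	if err != nil {
		return shed.Item{}, err
	}
	item := shed.Item{Address: []byte("address-1"), Data: []byte("chunk data")}
	if err := index.Put(item); err != nil {
		return shed.Item{}, err
	}
	// Get takes an Item carrying only the key fields and fills in the rest.
	return index.Get(shed.Item{Address: []byte("address-1")})
}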
......@@ -20,10 +20,12 @@ import (
"bytes"
"encoding/binary"
"fmt"
"io/ioutil"
"sort"
"testing"
"time"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -53,8 +55,8 @@ var retrievalIndexFuncs = IndexFuncs{
func TestIndex(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -367,8 +369,8 @@ func TestIndex(t *testing.T) {
func TestIndex_Iterate(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -502,7 +504,8 @@ func TestIndex_Iterate(t *testing.T) {
})
t.Run("no overflow", func(t *testing.T) {
secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -550,8 +553,8 @@ func TestIndex_Iterate(t *testing.T) {
func TestIndex_Iterate_withPrefix(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -697,7 +700,8 @@ func TestIndex_Iterate_withPrefix(t *testing.T) {
})
t.Run("no overflow", func(t *testing.T) {
secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
secondIndex, err := db.NewIndex("second-index", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -737,8 +741,8 @@ func TestIndex_Iterate_withPrefix(t *testing.T) {
func TestIndex_count(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -907,8 +911,8 @@ func checkItem(t *testing.T, got, want Item) {
func TestIndex_firstAndLast(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......@@ -1056,8 +1060,8 @@ func TestIncByteSlice(t *testing.T) {
func TestIndex_HasMulti(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
index, err := db.NewIndex("retrieval", retrievalIndexFuncs)
logger := logging.New(ioutil.Discard, 0)
index, err := db.NewIndex("retrieval", retrievalIndexFuncs, logger)
if err != nil {
t.Fatal(err)
}
......
......@@ -19,6 +19,7 @@ package shed
import (
"encoding/binary"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -27,11 +28,12 @@ import (
type Uint64Vector struct {
db *DB
key []byte
logger logging.Logger
}
// NewUint64Vector returns a new Uint64Vector.
// It validates its name and type against the database schema.
func (db *DB) NewUint64Vector(name string) (f Uint64Vector, err error) {
func (db *DB) NewUint64Vector(name string, logger logging.Logger) (f Uint64Vector, err error) {
key, err := db.schemaFieldKey(name, "vector-uint64")
if err != nil {
return f, err
......@@ -39,6 +41,7 @@ func (db *DB) NewUint64Vector(name string) (f Uint64Vector, err error) {
return Uint64Vector{
db: db,
key: key,
logger: logger,
}, nil
}
......@@ -75,6 +78,7 @@ func (f Uint64Vector) Inc(i uint64) (val uint64, err error) {
if err == leveldb.ErrNotFound {
val = 0
} else {
f.logger.Debugf("error getiing value while doing Inc. Error: %s", err.Error())
return 0, err
}
}
......@@ -91,6 +95,7 @@ func (f Uint64Vector) IncInBatch(batch *leveldb.Batch, i uint64) (val uint64, er
if err == leveldb.ErrNotFound {
val = 0
} else {
f.logger.Debugf("error getiing value while doing IncInBatch. Error: %s", err.Error())
return 0, err
}
}
......@@ -108,6 +113,7 @@ func (f Uint64Vector) Dec(i uint64) (val uint64, err error) {
if err == leveldb.ErrNotFound {
val = 0
} else {
f.logger.Debugf("error getiing value while doing Dec. Error: %s", err.Error())
return 0, err
}
}
......@@ -127,6 +133,7 @@ func (f Uint64Vector) DecInBatch(batch *leveldb.Batch, i uint64) (val uint64, er
if err == leveldb.ErrNotFound {
val = 0
} else {
f.logger.Debugf("error getiing value while doing DecInBatch. Error: %s", err.Error())
return 0, err
}
}
......
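Finally, a hedged sketch for Uint64Vector, where each element is addressed by its index and the logger is passed at construction as above (db and logger assumed from the first sketch):

func useBins(db *shed.DB, logger logging.Logger) (uint64, error) {
	bins, err := db.NewUint64Vector("bins", logger)
	if err != nil {
		return 0, err
	}
	// A missing element is treated as 0 (the leveldb.ErrNotFound branch above),
	// so the first Inc on bin 4 returns 1.
	return bins.Inc(4)
}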
......@@ -17,8 +17,10 @@
package shed
import (
"io/ioutil"
"testing"
"github.com/ethersphere/bee/pkg/logging"
"github.com/syndtr/goleveldb/leveldb"
)
......@@ -27,8 +29,8 @@ import (
func TestUint64Vector(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
bins, err := db.NewUint64Vector("bins")
logger := logging.New(ioutil.Discard, 0)
bins, err := db.NewUint64Vector("bins", logger)
if err != nil {
t.Fatal(err)
}
......@@ -118,8 +120,8 @@ func TestUint64Vector(t *testing.T) {
func TestUint64Vector_Inc(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
bins, err := db.NewUint64Vector("bins")
logger := logging.New(ioutil.Discard, 0)
bins, err := db.NewUint64Vector("bins", logger)
if err != nil {
t.Fatal(err)
}
......@@ -150,8 +152,8 @@ func TestUint64Vector_Inc(t *testing.T) {
func TestUint64Vector_IncInBatch(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
bins, err := db.NewUint64Vector("bins")
logger := logging.New(ioutil.Discard, 0)
bins, err := db.NewUint64Vector("bins", logger)
if err != nil {
t.Fatal(err)
}
......@@ -206,8 +208,8 @@ func TestUint64Vector_IncInBatch(t *testing.T) {
func TestUint64Vector_Dec(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
bins, err := db.NewUint64Vector("bins")
logger := logging.New(ioutil.Discard, 0)
bins, err := db.NewUint64Vector("bins", logger)
if err != nil {
t.Fatal(err)
}
......@@ -245,8 +247,8 @@ func TestUint64Vector_Dec(t *testing.T) {
func TestUint64Vector_DecInBatch(t *testing.T) {
db, cleanupFunc := newTestDB(t)
defer cleanupFunc()
bins, err := db.NewUint64Vector("bins")
logger := logging.New(ioutil.Discard, 0)
bins, err := db.NewUint64Vector("bins", logger)
if err != nil {
t.Fatal(err)
}
......