Add logs to cache haunter

This commit is contained in:
Deluan
2023-02-06 10:33:34 -05:00
parent a7f15facf9
commit bfaf4a3388
3 changed files with 12 additions and 3 deletions
+1 -1
View File
@@ -207,7 +207,7 @@ func newFSCache(name, cacheSize, cacheFolder string, maxItems int) (fscache.Cach
return nil, nil return nil, nil
} }
lru := NewFileHaunter(maxItems, int64(size), consts.DefaultCacheCleanUpInterval) lru := NewFileHaunter(name, maxItems, int64(size), consts.DefaultCacheCleanUpInterval)
h := fscache.NewLRUHaunterStrategy(lru) h := fscache.NewLRUHaunterStrategy(lru)
cacheFolder = filepath.Join(conf.Server.DataFolder, cacheFolder) cacheFolder = filepath.Join(conf.Server.DataFolder, cacheFolder)
+10 -1
View File
@@ -5,6 +5,7 @@ import (
"time" "time"
"github.com/djherbis/fscache" "github.com/djherbis/fscache"
"github.com/dustin/go-humanize"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
) )
@@ -20,8 +21,9 @@ type haunterKV struct {
// If maxItems or maxSize are 0, they won't be checked // If maxItems or maxSize are 0, they won't be checked
// //
// Based on fscache.NewLRUHaunter // Based on fscache.NewLRUHaunter
func NewFileHaunter(maxItems int, maxSize int64, period time.Duration) fscache.LRUHaunter { func NewFileHaunter(name string, maxItems int, maxSize int64, period time.Duration) fscache.LRUHaunter {
return &fileHaunter{ return &fileHaunter{
name: name,
period: period, period: period,
maxItems: maxItems, maxItems: maxItems,
maxSize: maxSize, maxSize: maxSize,
@@ -29,6 +31,7 @@ func NewFileHaunter(maxItems int, maxSize int64, period time.Duration) fscache.L
} }
type fileHaunter struct { type fileHaunter struct {
name string
period time.Duration period time.Duration
maxItems int maxItems int
maxSize int64 maxSize int64
@@ -43,6 +46,7 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) {
var size int64 var size int64
var okFiles []haunterKV var okFiles []haunterKV
log.Trace("Running cache cleanup", "cache", j.name, "maxSize", humanize.Bytes(uint64(j.maxSize)))
c.EnumerateEntries(func(key string, e fscache.Entry) bool { c.EnumerateEntries(func(key string, e fscache.Entry) bool {
if e.InUse() { if e.InUse() {
return true return true
@@ -90,6 +94,8 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) {
return true return true
} }
log.Trace("Current cache stats", "cache", j.name, "size", humanize.Bytes(uint64(size)), "numItems", count)
if j.maxItems > 0 { if j.maxItems > 0 {
for count > j.maxItems { for count > j.maxItems {
if !collectKeysToReapFn() { if !collectKeysToReapFn() {
@@ -106,6 +112,9 @@ func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) {
} }
} }
if len(keysToReap) > 0 {
log.Trace("Removing items from cache", "cache", j.name, "numItems", len(keysToReap))
}
return keysToReap return keysToReap
} }
+1 -1
View File
@@ -22,7 +22,7 @@ func TestFileHaunterMaxSize(t *testing.T) {
} }
defer os.RemoveAll(tempDir) defer os.RemoveAll(tempDir)
c, err := fscache.NewCacheWithHaunter(fs, fscache.NewLRUHaunterStrategy(cache.NewFileHaunter(0, 24, 400*time.Millisecond))) c, err := fscache.NewCacheWithHaunter(fs, fscache.NewLRUHaunterStrategy(cache.NewFileHaunter("", 0, 24, 400*time.Millisecond)))
if err != nil { if err != nil {
t.Error(err.Error()) t.Error(err.Error())
return return