update
Some checks failed
Pipeline: Test, Lint, Build / Get version info (push) Has been cancelled
Pipeline: Test, Lint, Build / Lint Go code (push) Has been cancelled
Pipeline: Test, Lint, Build / Test Go code (push) Has been cancelled
Pipeline: Test, Lint, Build / Test JS code (push) Has been cancelled
Pipeline: Test, Lint, Build / Lint i18n files (push) Has been cancelled
Pipeline: Test, Lint, Build / Check Docker configuration (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (darwin/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (darwin/arm64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/386) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v5) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v6) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm/v7) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (linux/arm64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (windows/386) (push) Has been cancelled
Pipeline: Test, Lint, Build / Build (windows/amd64) (push) Has been cancelled
Pipeline: Test, Lint, Build / Push to GHCR (push) Has been cancelled
Pipeline: Test, Lint, Build / Push to Docker Hub (push) Has been cancelled
Pipeline: Test, Lint, Build / Cleanup digest artifacts (push) Has been cancelled
Pipeline: Test, Lint, Build / Build Windows installers (push) Has been cancelled
Pipeline: Test, Lint, Build / Package/Release (push) Has been cancelled
Pipeline: Test, Lint, Build / Upload Linux PKG (push) Has been cancelled
Close stale issues and PRs / stale (push) Has been cancelled
POEditor import / update-translations (push) Has been cancelled

This commit is contained in:
2025-12-08 16:16:23 +01:00
commit c251f174ed
1349 changed files with 194301 additions and 0 deletions

17
utils/cache/cache_suite_test.go vendored Normal file
View File

@@ -0,0 +1,17 @@
package cache
import (
"testing"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestCache bootstraps the Ginkgo suite for the cache package. It initializes
// the shared test environment and raises the log threshold to Fatal so suite
// output stays readable.
func TestCache(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Cache Suite")
}

110
utils/cache/cached_http_client.go vendored Normal file
View File

@@ -0,0 +1,110 @@
package cache
import (
"bufio"
"bytes"
"encoding/base64"
"encoding/json"
"io"
"net/http"
"strings"
"time"
"github.com/navidrome/navidrome/log"
)
// cacheSizeLimit caps how many serialized responses the in-memory cache holds.
const cacheSizeLimit = 100

// HTTPClient wraps an httpDoer and memoizes responses in an in-memory TTL
// cache, keyed by the serialized form of the request.
type HTTPClient struct {
	cache SimpleCache[string, string] // serialized request -> serialized response
	hc    httpDoer                    // wrapped client that performs the real requests
	ttl   time.Duration               // how long cached responses stay valid
}

// httpDoer is the minimal client interface needed (satisfied by *http.Client).
type httpDoer interface {
	Do(req *http.Request) (*http.Response, error)
}

// requestData is the JSON-serializable form of an http.Request, used as the
// cache key.
type requestData struct {
	Method string
	Header http.Header
	URL    string
	Body   *string // base64-encoded body; nil when the request has no body
}
// NewHTTPClient returns an HTTPClient that delegates to wrapped and caches
// each successful response for ttl, keeping at most cacheSizeLimit entries.
func NewHTTPClient(wrapped httpDoer, ttl time.Duration) *HTTPClient {
	client := &HTTPClient{
		hc:  wrapped,
		ttl: ttl,
		cache: NewSimpleCache[string, string](Options{
			SizeLimit:  cacheSizeLimit,
			DefaultTTL: ttl,
		}),
	}
	return client
}
// Do implements httpDoer. It serializes req into a cache key and, on a cache
// miss, performs the real request via the wrapped client and stores the
// serialized response. The returned *http.Response is always rebuilt from the
// serialized form, so its Body is readable on both hits and misses.
//
// Note: serializing the request consumes req.Body (if any); the request sent
// to the wrapped client is rebuilt from the serialized key.
func (c *HTTPClient) Do(req *http.Request) (*http.Response, error) {
	key := c.serializeReq(req)
	cached := true
	start := time.Now()
	respStr, err := c.cache.GetWithLoader(key, func(key string) (string, time.Duration, error) {
		// The loader only runs on a cache miss.
		cached = false
		req, err := c.deserializeReq(key)
		if err != nil {
			// BUG FIX: req is nil here on failure, so the previous
			// log.Trace(req.Context(), ...) would panic with a nil dereference.
			log.Trace("CachedHTTPClient.Do", "key", key, err)
			return "", 0, err
		}
		resp, err := c.hc.Do(req)
		if err != nil {
			log.Trace(req.Context(), "CachedHTTPClient.Do", "req", req, err)
			return "", 0, err
		}
		defer resp.Body.Close()
		return c.serializeResponse(resp), c.ttl, nil
	})
	log.Trace(req.Context(), "CachedHTTPClient.Do", "key", key, "cached", cached, "elapsed", time.Since(start), err)
	if err != nil {
		return nil, err
	}
	return c.deserializeResponse(req, respStr)
}
// serializeReq encodes req as a JSON document containing method, headers, URL
// and base64-encoded body. The resulting string doubles as the cache key.
// Reading the body consumes req.Body.
func (c *HTTPClient) serializeReq(req *http.Request) string {
	data := requestData{
		Method: req.Method,
		Header: req.Header,
		URL:    req.URL.String(),
	}
	if req.Body != nil {
		raw, _ := io.ReadAll(req.Body)
		encoded := base64.StdEncoding.EncodeToString(raw)
		data.Body = &encoded
	}
	payload, _ := json.Marshal(&data)
	return string(payload)
}
// deserializeReq rebuilds an *http.Request from the JSON produced by
// serializeReq. It returns an error if the payload or its body cannot be
// decoded, or if the request cannot be constructed.
func (c *HTTPClient) deserializeReq(reqStr string) (*http.Request, error) {
	var data requestData
	// Previously decode errors were silently discarded, which could yield a
	// bogus zero-value request; fail fast instead.
	if err := json.Unmarshal([]byte(reqStr), &data); err != nil {
		return nil, err
	}
	var body io.Reader
	if data.Body != nil {
		decoded, err := base64.StdEncoding.DecodeString(*data.Body)
		if err != nil {
			return nil, err
		}
		body = strings.NewReader(string(decoded))
	}
	req, err := http.NewRequest(data.Method, data.URL, body)
	if err != nil {
		return nil, err
	}
	req.Header = data.Header
	return req, nil
}
// serializeResponse renders the full wire representation of resp (status
// line, headers and body) as a string, suitable for caching.
func (c *HTTPClient) serializeResponse(resp *http.Response) string {
	var buf bytes.Buffer
	_ = resp.Write(&buf)
	return buf.String()
}
// deserializeResponse parses a response previously produced by
// serializeResponse, associating it with the original request req.
func (c *HTTPClient) deserializeResponse(req *http.Request, respStr string) (*http.Response, error) {
	r := bufio.NewReader(strings.NewReader(respStr))
	return http.ReadResponse(r, req)
}

93
utils/cache/cached_http_client_test.go vendored Normal file
View File

@@ -0,0 +1,93 @@
package cache
import (
"fmt"
"io"
"net/http"
"net/http/httptest"
"time"
"github.com/navidrome/navidrome/consts"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Exercises HTTPClient against a live httptest server, verifying that
// identical requests are served from cache, that URL or header changes
// produce distinct cache keys, and that entries expire after the TTL.
var _ = Describe("HTTPClient", func() {
	Context("GET", func() {
		var chc *HTTPClient
		var ts *httptest.Server
		var requestsReceived int // incremented by the server on every real (non-cached) hit
		var header string        // last value of the "head" header seen by the server
		BeforeEach(func() {
			ts = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				requestsReceived++
				header = r.Header.Get("head")
				_, _ = fmt.Fprintf(w, "Hello, %s", r.URL.Query()["name"])
			}))
			chc = NewHTTPClient(http.DefaultClient, consts.DefaultHttpClientTimeOut)
		})
		AfterEach(func() {
			defer ts.Close()
		})
		It("caches repeated requests", func() {
			r, _ := http.NewRequest("GET", ts.URL+"?name=doe", nil)
			resp, err := chc.Do(r)
			Expect(err).To(BeNil())
			body, err := io.ReadAll(resp.Body)
			Expect(err).To(BeNil())
			Expect(string(body)).To(Equal("Hello, [doe]"))
			Expect(requestsReceived).To(Equal(1))
			// Same request: served from cache, server hit count unchanged
			r, _ = http.NewRequest("GET", ts.URL+"?name=doe", nil)
			resp, err = chc.Do(r)
			Expect(err).To(BeNil())
			body, err = io.ReadAll(resp.Body)
			Expect(err).To(BeNil())
			Expect(string(body)).To(Equal("Hello, [doe]"))
			Expect(requestsReceived).To(Equal(1))
			// Different request (different URL): new cache key, new server hit
			r, _ = http.NewRequest("GET", ts.URL, nil)
			resp, err = chc.Do(r)
			Expect(err).To(BeNil())
			body, err = io.ReadAll(resp.Body)
			Expect(err).To(BeNil())
			Expect(string(body)).To(Equal("Hello, []"))
			Expect(requestsReceived).To(Equal(2))
			// Different again (same as before, but with header): headers are part of the key
			r, _ = http.NewRequest("GET", ts.URL, nil)
			r.Header.Add("head", "this is a header")
			resp, err = chc.Do(r)
			Expect(err).To(BeNil())
			body, err = io.ReadAll(resp.Body)
			Expect(err).To(BeNil())
			Expect(string(body)).To(Equal("Hello, []"))
			Expect(header).To(Equal("this is a header"))
			Expect(requestsReceived).To(Equal(3))
		})
		It("expires responses after TTL", func() {
			requestsReceived = 0
			chc = NewHTTPClient(http.DefaultClient, 10*time.Millisecond)
			r, _ := http.NewRequest("GET", ts.URL+"?name=doe", nil)
			_, err := chc.Do(r)
			Expect(err).To(BeNil())
			Expect(requestsReceived).To(Equal(1))
			// Wait more than the TTL
			time.Sleep(50 * time.Millisecond)
			// Same request: entry expired, so the server is hit again
			r, _ = http.NewRequest("GET", ts.URL+"?name=doe", nil)
			_, err = chc.Do(r)
			Expect(err).To(BeNil())
			Expect(requestsReceived).To(Equal(2))
		})
	})
})

283
utils/cache/file_caches.go vendored Normal file
View File

@@ -0,0 +1,283 @@
package cache
import (
"context"
"fmt"
"io"
"path/filepath"
"sync"
"sync/atomic"
"time"
"github.com/djherbis/fscache"
"github.com/dustin/go-humanize"
"github.com/hashicorp/go-multierror"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/log"
)
// Item represents an item that can be cached. It must implement the Key method that returns a unique key for a
// given item.
type Item interface {
	Key() string
}

// ReadFunc is a function that retrieves the data to be cached. It receives the Item to be cached and returns
// an io.Reader with the data and an error.
type ReadFunc func(ctx context.Context, item Item) (io.Reader, error)

// FileCache is designed to cache data on the filesystem to improve performance by avoiding repeated data
// retrieval operations.
//
// Errors are handled gracefully. If the cache is not initialized or an error occurs during data
// retrieval, it will log the error and proceed without caching.
type FileCache interface {
	// Get retrieves data from the cache. This method checks if the data is already cached. If it is, it
	// returns the cached data. If not, it retrieves the data using the provided getReader function and caches it.
	// The caller is responsible for closing the returned stream.
	//
	// Example Usage:
	//
	//	s, err := fc.Get(context.Background(), cacheKey("testKey"))
	//	if err != nil {
	//		log.Fatal(err)
	//	}
	//	defer s.Close()
	//
	//	data, err := io.ReadAll(s)
	//	if err != nil {
	//		log.Fatal(err)
	//	}
	//	fmt.Println(string(data))
	Get(ctx context.Context, item Item) (*CachedStream, error)

	// Available checks if the cache is available
	Available(ctx context.Context) bool

	// Disabled reports if the cache has been permanently disabled
	Disabled(ctx context.Context) bool
}
// NewFileCache creates a new FileCache. This function initializes the cache and starts it in the background.
//
// name: A string representing the name of the cache.
// cacheSize: A string representing the maximum size of the cache (e.g., "1KB", "10MB").
// cacheFolder: A string representing the folder where the cache files will be stored.
// maxItems: An integer representing the maximum number of items the cache can hold.
// getReader: A function of type ReadFunc that retrieves the data to be cached.
//
// Example Usage:
//
//	fc := NewFileCache("exampleCache", "10MB", "cacheFolder", 100, func(ctx context.Context, item Item) (io.Reader, error) {
//		// Implement the logic to retrieve the data for the given item
//		return strings.NewReader(item.Key()), nil
//	})
func NewFileCache(name, cacheSize, cacheFolder string, maxItems int, getReader ReadFunc) FileCache {
	fc := &fileCache{
		name:        name,
		cacheSize:   cacheSize,
		cacheFolder: filepath.FromSlash(cacheFolder),
		maxItems:    maxItems,
		getReader:   getReader,
		mutex:       &sync.RWMutex{},
	}
	// Initialization (which may scan the cache folder) happens in the
	// background; Available() reports false until it completes.
	go func() {
		start := time.Now()
		cache, err := newFSCache(fc.name, fc.cacheSize, fc.cacheFolder, fc.maxItems)
		fc.mutex.Lock()
		defer fc.mutex.Unlock()
		fc.cache = cache
		fc.disabled = cache == nil || err != nil
		log.Info("Finished initializing cache", "cache", fc.name, "maxSize", fc.cacheSize, "elapsedTime", time.Since(start))
		fc.ready.Store(true)
		if err != nil {
			// BUG FIX: the format argument was the literal string "name",
			// producing the message `Cache name will be DISABLED...` for
			// every cache instead of the actual cache name.
			log.Error(fmt.Sprintf("Cache %s will be DISABLED due to previous errors", fc.name), err)
		}
		if fc.disabled {
			log.Debug("Cache DISABLED", "cache", fc.name, "size", fc.cacheSize)
		}
	}()
	return fc
}
// fileCache is the concrete FileCache implementation. The mutex guards the
// fields written by the background initialization goroutine (cache, disabled).
type fileCache struct {
	name        string        // cache name, used in logs
	cacheSize   string        // configured max size (human-readable, e.g. "10MB")
	cacheFolder string        // folder, relative to conf.Server.CacheFolder
	maxItems    int           // max number of entries (0 = unchecked)
	cache       fscache.Cache // underlying fscache; nil when disabled
	getReader   ReadFunc      // fetches the data on a cache miss
	disabled    bool          // set when initialization failed or size is "0"
	ready       atomic.Bool   // true once background initialization finished
	mutex       *sync.RWMutex
}
// Available reports whether the cache finished initializing and is enabled.
func (fc *fileCache) Available(_ context.Context) bool {
	fc.mutex.RLock()
	defer fc.mutex.RUnlock()
	return fc.ready.Load() && !fc.disabled
}

// Disabled reports if the cache has been permanently disabled (initialization
// failed, or the configured size was "0").
func (fc *fileCache) Disabled(_ context.Context) bool {
	fc.mutex.RLock()
	defer fc.mutex.RUnlock()
	return fc.disabled
}
// invalidate removes key from the cache, if present. It is a no-op (returning
// nil) when the cache is not yet initialized or the key does not exist.
func (fc *fileCache) invalidate(ctx context.Context, key string) error {
	if !fc.Available(ctx) {
		log.Debug(ctx, "Cache not initialized yet. Cannot invalidate key", "cache", fc.name, "key", key)
		return nil
	}
	if !fc.cache.Exists(key) {
		return nil
	}
	if err := fc.cache.Remove(key); err != nil {
		log.Warn(ctx, "Error removing key from cache", "cache", fc.name, "key", key, err)
		return err
	}
	return nil
}
// Get retrieves the item's data, fetching it with fc.getReader on a miss.
// While the cache is still initializing, data is read directly from the
// reader and not cached. On a hit whose file is fully written, the returned
// CachedStream is seekable; otherwise it only supports sequential reads.
func (fc *fileCache) Get(ctx context.Context, arg Item) (*CachedStream, error) {
	if !fc.Available(ctx) {
		log.Debug(ctx, "Cache not initialized yet. Reading data directly from reader", "cache", fc.name)
		reader, err := fc.getReader(ctx, arg)
		if err != nil {
			return nil, err
		}
		return &CachedStream{Reader: reader}, nil
	}
	key := arg.Key()
	r, w, err := fc.cache.Get(key)
	if err != nil {
		return nil, err
	}
	// fscache returns a non-nil writer only when this caller must populate the entry
	cached := w == nil
	if !cached {
		log.Trace(ctx, "Cache MISS", "cache", fc.name, "key", key)
		reader, err := fc.getReader(ctx, arg)
		if err != nil {
			_ = r.Close()
			_ = w.Close()
			_ = fc.invalidate(ctx, key)
			return nil, err
		}
		// Fill the cache in the background; the caller streams from r concurrently
		go func() {
			if err := copyAndClose(w, reader); err != nil {
				log.Debug(ctx, "Error storing file in cache", "cache", fc.name, "key", key, err)
				_ = fc.invalidate(ctx, key)
			} else {
				log.Trace(ctx, "File successfully stored in cache", "cache", fc.name, "key", key)
			}
		}()
	}
	// If it is in the cache, check if the stream is done being written. If so, return a ReadSeeker
	if cached {
		size := getFinalCachedSize(r)
		if size >= 0 {
			log.Trace(ctx, "Cache HIT", "cache", fc.name, "key", key, "size", size)
			sr := io.NewSectionReader(r, 0, size)
			return &CachedStream{
				Reader: sr,
				Seeker: sr,
				Closer: r, // close the underlying cache file, not the section reader
				Cached: true,
			}, nil
		} else {
			log.Trace(ctx, "Cache HIT", "cache", fc.name, "key", key)
		}
	}
	// All other cases, just return the cache reader, without Seek capabilities
	return &CachedStream{Reader: r, Cached: cached}, nil
}
// CachedStream is a wrapper around an io.ReadCloser that allows reading from a cache.
// Seeker and Closer are only set when the underlying entry supports them;
// Cached reports whether the data was served from the cache.
type CachedStream struct {
	io.Reader
	io.Seeker
	io.Closer
	Cached bool
}

// Close closes the explicit Closer when present; otherwise it closes the
// Reader if it happens to implement io.Closer.
func (s *CachedStream) Close() error {
	if s.Closer != nil {
		return s.Closer.Close()
	}
	if c, ok := s.Reader.(io.Closer); ok {
		return c.Close()
	}
	return nil
}
// getFinalCachedSize returns the size of the cached entry if it has been
// fully written, or -1 when the entry is still being streamed (or its size
// cannot be determined).
func getFinalCachedSize(r fscache.ReadAtCloser) int64 {
	cr, ok := r.(*fscache.CacheReader)
	if !ok {
		return -1
	}
	size, final, err := cr.Size()
	if !final || err != nil {
		return -1
	}
	return size
}
// copyAndClose streams r into w, then closes both (r only when it implements
// io.Closer). All errors encountered along the way are combined and returned.
func copyAndClose(w io.WriteCloser, r io.Reader) error {
	var err error
	if _, copyErr := io.Copy(w, r); copyErr != nil {
		err = fmt.Errorf("copying data to cache: %w", copyErr)
	}
	if closer, ok := r.(io.Closer); ok {
		if cErr := closer.Close(); cErr != nil {
			err = multierror.Append(err, fmt.Errorf("closing source stream: %w", cErr))
		}
	}
	if cErr := w.Close(); cErr != nil {
		err = multierror.Append(err, fmt.Errorf("closing cache writer: %w", cErr))
	}
	return err
}
// newFSCache builds the underlying fscache instance rooted at
// conf.Server.CacheFolder/cacheFolder, with an LRU haunter enforcing maxItems
// and the parsed cacheSize. It returns (nil, nil) when the configured size
// parses to 0, which callers treat as "cache disabled".
func newFSCache(name, cacheSize, cacheFolder string, maxItems int) (fscache.Cache, error) {
	size, err := humanize.ParseBytes(cacheSize)
	if err != nil {
		// Fall back to the default size instead of failing hard on bad config
		log.Error("Invalid cache size. Using default size", "cache", name, "size", cacheSize,
			"defaultSize", humanize.Bytes(consts.DefaultCacheSize))
		size = consts.DefaultCacheSize
	}
	if size == 0 {
		log.Warn(fmt.Sprintf("%s cache disabled", name))
		return nil, nil
	}
	lru := NewFileHaunter(name, maxItems, size, consts.DefaultCacheCleanUpInterval)
	h := fscache.NewLRUHaunterStrategy(lru)
	cacheFolder = filepath.Join(conf.Server.CacheFolder, cacheFolder)
	var fs *spreadFS
	log.Info(fmt.Sprintf("Creating %s cache", name), "path", cacheFolder, "maxSize", humanize.Bytes(size))
	fs, err = NewSpreadFS(cacheFolder, 0755)
	if err != nil {
		log.Error(fmt.Sprintf("Error initializing %s cache FS", name), err)
		return nil, err
	}
	ck, err := fscache.NewCacheWithHaunter(fs, h)
	if err != nil {
		log.Error(fmt.Sprintf("Error initializing %s cache", name), err)
		return nil, err
	}
	// Map keys onto the hashed, spread directory layout
	ck.SetKeyMapper(fs.KeyMapper)
	return ck, nil
}

150
utils/cache/file_caches_test.go vendored Normal file
View File

@@ -0,0 +1,150 @@
package cache
import (
"context"
"errors"
"io"
"os"
"path/filepath"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// callNewFileCache creates a FileCache via NewFileCache and blocks until its
// background initialization finishes, so tests can safely inspect internal
// state (cache, disabled).
func callNewFileCache(name, cacheSize, cacheFolder string, maxItems int, getReader ReadFunc) *fileCache {
	fc := NewFileCache(name, cacheSize, cacheFolder, maxItems, getReader).(*fileCache)
	Eventually(func() bool { return fc.ready.Load() }).Should(BeTrue())
	return fc
}
// Covers FileCache behavior: cache folder creation, invalid/zero size
// handling, hit/miss semantics, and error handling when the reader fails.
var _ = Describe("File Caches", func() {
	BeforeEach(func() {
		// Each test gets its own temp cache folder, removed afterwards
		tmpDir, _ := os.MkdirTemp("", "file_caches")
		DeferCleanup(func() {
			configtest.SetupConfig()
			_ = os.RemoveAll(tmpDir)
		})
		conf.Server.CacheFolder = tmpDir
	})
	Describe("NewFileCache", func() {
		It("creates the cache folder", func() {
			Expect(callNewFileCache("test", "1k", "test", 0, nil)).ToNot(BeNil())
			_, err := os.Stat(filepath.Join(conf.Server.CacheFolder, "test"))
			Expect(os.IsNotExist(err)).To(BeFalse())
		})
		It("creates the cache folder with invalid size", func() {
			// An unparseable size falls back to the default instead of disabling
			fc := callNewFileCache("test", "abc", "test", 0, nil)
			Expect(fc.cache).ToNot(BeNil())
			Expect(fc.disabled).To(BeFalse())
		})
		It("returns empty if cache size is '0'", func() {
			fc := callNewFileCache("test", "0", "test", 0, nil)
			Expect(fc.cache).To(BeNil())
			Expect(fc.disabled).To(BeTrue())
		})
		It("reports when cache is disabled", func() {
			fc := callNewFileCache("test", "0", "test", 0, nil)
			Expect(fc.Disabled(context.Background())).To(BeTrue())
			fc = callNewFileCache("test", "1KB", "test", 0, nil)
			Expect(fc.Disabled(context.Background())).To(BeFalse())
		})
	})
	Describe("FileCache", func() {
		It("caches data if cache is enabled", func() {
			called := false
			fc := callNewFileCache("test", "1KB", "test", 0, func(ctx context.Context, arg Item) (io.Reader, error) {
				called = true
				return strings.NewReader(arg.Key()), nil
			})
			// First call is a MISS
			s, err := fc.Get(context.Background(), &testArg{"test"})
			Expect(err).To(BeNil())
			Expect(s.Cached).To(BeFalse())
			Expect(s.Closer).To(BeNil())
			Expect(io.ReadAll(s)).To(Equal([]byte("test")))
			// Second call is a HIT
			called = false
			s, err = fc.Get(context.Background(), &testArg{"test"})
			Expect(err).To(BeNil())
			Expect(io.ReadAll(s)).To(Equal([]byte("test")))
			Expect(s.Cached).To(BeTrue())
			Expect(s.Closer).ToNot(BeNil())
			Expect(called).To(BeFalse())
		})
		It("does not cache data if cache is disabled", func() {
			called := false
			fc := callNewFileCache("test", "0", "test", 0, func(ctx context.Context, arg Item) (io.Reader, error) {
				called = true
				return strings.NewReader(arg.Key()), nil
			})
			// First call is a MISS
			s, err := fc.Get(context.Background(), &testArg{"test"})
			Expect(err).To(BeNil())
			Expect(s.Cached).To(BeFalse())
			Expect(io.ReadAll(s)).To(Equal([]byte("test")))
			// Second call is also a MISS
			called = false
			s, err = fc.Get(context.Background(), &testArg{"test"})
			Expect(err).To(BeNil())
			Expect(io.ReadAll(s)).To(Equal([]byte("test")))
			Expect(s.Cached).To(BeFalse())
			Expect(called).To(BeTrue())
		})
		Context("reader errors", func() {
			When("creating a reader fails", func() {
				It("does not cache", func() {
					fc := callNewFileCache("test", "1KB", "test", 0, func(ctx context.Context, arg Item) (io.Reader, error) {
						return nil, errors.New("failed")
					})
					_, err := fc.Get(context.Background(), &testArg{"test"})
					Expect(err).To(MatchError("failed"))
				})
			})
			When("reader returns error", func() {
				It("does not cache", func() {
					fc := callNewFileCache("test", "1KB", "test", 0, func(ctx context.Context, arg Item) (io.Reader, error) {
						return errFakeReader{errors.New("read failure")}, nil
					})
					s, err := fc.Get(context.Background(), &testArg{"test"})
					Expect(err).ToNot(HaveOccurred())
					_, _ = io.Copy(io.Discard, s)
					// TODO How to make the fscache reader return the underlying reader error?
					//Expect(err).To(MatchError("read failure"))
					// Data should not be cached (or eventually be removed from cache)
					Eventually(func() bool {
						s, _ = fc.Get(context.Background(), &testArg{"test"})
						if s != nil {
							return s.Cached
						}
						return false
					}).Should(BeFalse())
				})
			})
		})
	})
})
// testArg is a minimal Item implementation whose key is the wrapped string.
type testArg struct{ s string }

func (t *testArg) Key() string { return t.s }

// errFakeReader is an io.Reader that always fails with the given error.
type errFakeReader struct{ err error }

func (e errFakeReader) Read([]byte) (int, error) { return 0, e.err }

130
utils/cache/file_haunter.go vendored Normal file
View File

@@ -0,0 +1,130 @@
package cache
import (
"sort"
"time"
"github.com/djherbis/fscache"
"github.com/dustin/go-humanize"
"github.com/navidrome/navidrome/log"
)
// haunterKV bundles a cache entry with its key and file metadata, as
// collected during a Scrub pass.
type haunterKV struct {
	key   string
	value fscache.Entry
	info  fscache.FileInfo
}

// NewFileHaunter returns a simple haunter which runs every "period"
// and scrubs older files when the total file size is over maxSize or
// total item count is over maxItems. It also removes empty (invalid) files.
// If maxItems or maxSize are 0, they won't be checked
//
// Based on fscache.NewLRUHaunter
func NewFileHaunter(name string, maxItems int, maxSize uint64, period time.Duration) fscache.LRUHaunter {
	return &fileHaunter{
		name:     name,
		period:   period,
		maxItems: maxItems,
		maxSize:  maxSize,
	}
}

// fileHaunter implements fscache.LRUHaunter.
type fileHaunter struct {
	name     string        // cache name, for logging
	period   time.Duration // how often Scrub runs
	maxItems int           // max entry count (0 = unchecked)
	maxSize  uint64        // max total bytes (0 = unchecked)
}

// Next returns the interval until the next Scrub run.
func (j *fileHaunter) Next() time.Duration {
	return j.period
}
// Scrub implements fscache.LRUHaunter. It enumerates all entries not
// currently in use and returns the keys to remove: empty (invalid) files
// always, plus the least-recently-accessed entries until the cache is back
// under maxItems and maxSize (each limit only enforced when > 0).
func (j *fileHaunter) Scrub(c fscache.CacheAccessor) (keysToReap []string) {
	var count int
	var size uint64
	var okFiles []haunterKV
	log.Trace("Running cache cleanup", "cache", j.name, "maxSize", humanize.Bytes(j.maxSize))
	c.EnumerateEntries(func(key string, e fscache.Entry) bool {
		// Skip entries with active readers/writers
		if e.InUse() {
			return true
		}
		fileInfo, err := c.Stat(e.Name())
		if err != nil {
			return true
		}
		if fileInfo.Size() == 0 {
			log.Trace("Removing invalid empty file", "file", e.Name())
			keysToReap = append(keysToReap, key)
		}
		// NOTE(review): empty files are still counted and appended to okFiles
		// below, so their key can also be returned a second time by the LRU
		// loop — harmless only if removal is idempotent; confirm intended.
		count++
		size = size + uint64(fileInfo.Size())
		okFiles = append(okFiles, haunterKV{
			key:   key,
			value: e,
			info:  fileInfo,
		})
		return true
	})
	// Oldest access time first, so the least-recently-used entries go first
	sort.Slice(okFiles, func(i, j int) bool {
		iLastRead := okFiles[i].info.AccessTime()
		jLastRead := okFiles[j].info.AccessTime()
		return iLastRead.Before(jLastRead)
	})
	// Pops the LRU entry and records its key; returns false on error
	collectKeysToReapFn := func() bool {
		var key *string
		var err error
		key, count, size, err = j.removeFirst(&okFiles, count, size)
		if err != nil {
			return false
		}
		if key != nil {
			keysToReap = append(keysToReap, *key)
		}
		return true
	}
	log.Trace("Current cache stats", "cache", j.name, "size", humanize.Bytes(size), "numItems", count)
	if j.maxItems > 0 {
		for count > j.maxItems {
			if !collectKeysToReapFn() {
				break
			}
		}
	}
	if j.maxSize > 0 {
		for size > j.maxSize {
			if !collectKeysToReapFn() {
				break
			}
		}
	}
	if len(keysToReap) > 0 {
		log.Trace("Removing items from cache", "cache", j.name, "numItems", len(keysToReap))
	}
	return keysToReap
}
// removeFirst pops the least-recently-accessed entry from items and returns
// its key along with the updated count and total size. The error result is
// always nil; it is kept for signature stability with the caller's loop.
func (j *fileHaunter) removeFirst(items *[]haunterKV, count int, size uint64) (*string, int, uint64, error) {
	head := (*items)[0]
	*items = (*items)[1:]
	return &head.key, count - 1, size - uint64(head.info.Size()), nil
}

102
utils/cache/file_haunter_test.go vendored Normal file
View File

@@ -0,0 +1,102 @@
package cache_test
import (
"errors"
"fmt"
"io"
"os"
"path/filepath"
"time"
"github.com/djherbis/fscache"
"github.com/navidrome/navidrome/utils/cache"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Exercises FileHaunter through a real fscache instance: creates six files
// (one empty) and verifies that scrubbing enforces maxSize/maxItems and
// removes the empty (invalid) entry.
var _ = Describe("FileHaunter", func() {
	var fs fscache.FileSystem
	var fsCache *fscache.FSCache
	var cacheDir string
	var err error
	var maxItems int
	var maxSize uint64
	JustBeforeEach(func() {
		tempDir, _ := os.MkdirTemp("", "spread_fs")
		cacheDir = filepath.Join(tempDir, "cache1")
		fs, err = fscache.NewFs(cacheDir, 0700)
		Expect(err).ToNot(HaveOccurred())
		DeferCleanup(func() { _ = os.RemoveAll(tempDir) })
		fsCache, err = fscache.NewCacheWithHaunter(fs, fscache.NewLRUHaunterStrategy(
			cache.NewFileHaunter("", maxItems, maxSize, 300*time.Millisecond),
		))
		Expect(err).ToNot(HaveOccurred())
		DeferCleanup(fsCache.Clean)
		Expect(createTestFiles(fsCache)).To(Succeed())
		// Give the haunter (period 300ms) a chance to run at least once
		<-time.After(400 * time.Millisecond)
	})
	Context("When maxSize is defined", func() {
		BeforeEach(func() {
			maxSize = 20
		})
		It("removes files", func() {
			Expect(os.ReadDir(cacheDir)).To(HaveLen(4))
			Expect(fsCache.Exists("stream-5")).To(BeFalse(), "stream-5 (empty file) should have been scrubbed")
			// TODO Fix flaky tests
			//Expect(fsCache.Exists("stream-0")).To(BeFalse(), "stream-0 should have been scrubbed")
		})
	})
	XContext("When maxItems is defined", func() {
		BeforeEach(func() {
			maxItems = 3
		})
		It("removes files", func() {
			Expect(os.ReadDir(cacheDir)).To(HaveLen(maxItems))
			Expect(fsCache.Exists("stream-5")).To(BeFalse(), "stream-5 (empty file) should have been scrubbed")
			// TODO Fix flaky tests
			//Expect(fsCache.Exists("stream-0")).To(BeFalse(), "stream-0 should have been scrubbed")
			//Expect(fsCache.Exists("stream-1")).To(BeFalse(), "stream-1 should have been scrubbed")
		})
	})
})
// createTestFiles populates c with 5 streams containing "hello" plus one
// empty stream, staggering creation by 10ms so the haunter sees a stable
// LRU order. Returns an error if an entry fails to register or close.
func createTestFiles(c *fscache.FSCache) error {
	// Create 5 normal files and 1 empty
	for i := 0; i < 6; i++ {
		name := fmt.Sprintf("stream-%v", i)
		var r fscache.ReadAtCloser
		if i < 5 {
			r = createCachedStream(c, name, "hello")
		} else { // Last one is empty
			r = createCachedStream(c, name, "")
		}
		if !c.Exists(name) {
			return errors.New(name + " should exist")
		}
		<-time.After(10 * time.Millisecond)
		err := r.Close()
		if err != nil {
			return err
		}
	}
	return nil
}
// createCachedStream writes contents under name and drains the read side so
// the entry is fully materialized. Errors are intentionally ignored — this
// is a best-effort test helper.
func createCachedStream(c *fscache.FSCache, name string, contents string) fscache.ReadAtCloser {
	r, w, _ := c.Get(name)
	_, _ = w.Write([]byte(contents))
	_ = w.Close()
	_, _ = io.Copy(io.Discard, r)
	return r
}

153
utils/cache/simple_cache.go vendored Normal file
View File

@@ -0,0 +1,153 @@
package cache
import (
"context"
"errors"
"fmt"
"runtime"
"sync/atomic"
"time"
"github.com/jellydator/ttlcache/v3"
. "github.com/navidrome/navidrome/utils/gg"
)
// SimpleCache is a generic in-memory cache with per-item TTL support.
type SimpleCache[K comparable, V any] interface {
	// Add stores value under key using the cache's default TTL.
	Add(key K, value V) error
	// AddWithTTL stores value under key, expiring after ttl.
	AddWithTTL(key K, value V, ttl time.Duration) error
	// Get returns the value stored under key, or an error if absent/expired.
	Get(key K) (V, error)
	// GetWithLoader returns the cached value, calling loader to compute and
	// store it (with the TTL the loader returns) when absent.
	GetWithLoader(key K, loader func(key K) (V, time.Duration, error)) (V, error)
	// Keys returns the keys of all non-expired entries.
	Keys() []K
	// Values returns the values of all non-expired entries.
	Values() []V
	// Len returns the number of entries currently stored.
	Len() int
	// OnExpiration registers fn to run when an item expires; the returned
	// function unregisters the callback.
	OnExpiration(fn func(K, V)) func()
}

// Options configures a SimpleCache. Zero values leave the corresponding
// feature (capacity bound / default TTL) disabled.
type Options struct {
	SizeLimit  uint64
	DefaultTTL time.Duration
}
// NewSimpleCache creates a SimpleCache backed by ttlcache. An optional
// Options value can bound the number of entries (SizeLimit) and set a
// default TTL applied by Add.
func NewSimpleCache[K comparable, V any](options ...Options) SimpleCache[K, V] {
	tcOpts := []ttlcache.Option[K, V]{
		ttlcache.WithDisableTouchOnHit[K, V](),
	}
	if len(options) > 0 {
		if limit := options[0].SizeLimit; limit > 0 {
			tcOpts = append(tcOpts, ttlcache.WithCapacity[K, V](limit))
		}
		if ttl := options[0].DefaultTTL; ttl > 0 {
			tcOpts = append(tcOpts, ttlcache.WithTTL[K, V](ttl))
		}
	}
	cache := &simpleCache[K, V]{data: ttlcache.New[K, V](tcOpts...)}
	go cache.data.Start()
	// Automatic cleanup to prevent goroutine leak when cache is garbage collected
	runtime.AddCleanup(cache, func(tc *ttlcache.Cache[K, V]) {
		tc.Stop()
	}, cache.data)
	return cache
}
// evictionTimeout is the minimum interval between proactive expired-item
// sweeps triggered by cache writes (see evictExpired).
const evictionTimeout = 1 * time.Hour

// simpleCache implements SimpleCache on top of ttlcache.
type simpleCache[K comparable, V any] struct {
	data             *ttlcache.Cache[K, V]
	evictionDeadline atomic.Pointer[time.Time] // earliest time the next proactive sweep may run
}
// Add stores value under key using the cache's default TTL.
func (c *simpleCache[K, V]) Add(key K, value V) error {
	c.evictExpired()
	return c.AddWithTTL(key, value, ttlcache.DefaultTTL)
}

// AddWithTTL stores value under key, expiring after ttl.
func (c *simpleCache[K, V]) AddWithTTL(key K, value V, ttl time.Duration) error {
	c.evictExpired()
	if item := c.data.Set(key, value, ttl); item == nil {
		return errors.New("failed to add item")
	}
	return nil
}

// Get returns the value stored under key, or an error if absent/expired.
func (c *simpleCache[K, V]) Get(key K) (V, error) {
	if item := c.data.Get(key); item != nil {
		return item.Value(), nil
	}
	var zero V
	return zero, errors.New("item not found")
}
// GetWithLoader returns the cached value for key, invoking loader to compute
// and store it (with the TTL the loader returns) when the key is absent.
// The loader's error is captured via the enclosing closure because ttlcache's
// loader interface cannot return one directly.
func (c *simpleCache[K, V]) GetWithLoader(key K, loader func(key K) (V, time.Duration, error)) (V, error) {
	var err error
	loaderWrapper := ttlcache.LoaderFunc[K, V](
		func(t *ttlcache.Cache[K, V], key K) *ttlcache.Item[K, V] {
			c.evictExpired()
			var value V
			var ttl time.Duration
			value, ttl, err = loader(key)
			if err != nil {
				// Returning nil signals a load failure to ttlcache
				return nil
			}
			return t.Set(key, value, ttl)
		},
	)
	item := c.data.Get(key, ttlcache.WithLoader[K, V](loaderWrapper))
	if item == nil {
		var zero V
		if err != nil {
			return zero, fmt.Errorf("cache error: loader returned %w", err)
		}
		return zero, errors.New("item not found")
	}
	return item.Value(), nil
}
// evictExpired proactively deletes expired entries, at most once per
// evictionTimeout. It is called from the write paths so an otherwise idle
// cache does not retain expired values indefinitely.
func (c *simpleCache[K, V]) evictExpired() {
	// Load the deadline pointer once: the previous code performed two
	// separate atomic Loads, so a concurrent Store between them meant the
	// pointer being dereferenced could differ from the one nil-checked.
	deadline := c.evictionDeadline.Load()
	if deadline == nil || deadline.Before(time.Now()) {
		c.data.DeleteExpired()
		c.evictionDeadline.Store(P(time.Now().Add(evictionTimeout)))
	}
}
// Keys returns the keys of all live (non-expired) entries.
func (c *simpleCache[K, V]) Keys() []K {
	keys := make([]K, 0, c.data.Len())
	c.data.Range(func(item *ttlcache.Item[K, V]) bool {
		if item.IsExpired() {
			return true
		}
		keys = append(keys, item.Key())
		return true
	})
	return keys
}

// Values returns the values of all live (non-expired) entries.
func (c *simpleCache[K, V]) Values() []V {
	values := make([]V, 0, c.data.Len())
	c.data.Range(func(item *ttlcache.Item[K, V]) bool {
		if item.IsExpired() {
			return true
		}
		values = append(values, item.Value())
		return true
	})
	return values
}

// Len returns the number of entries currently stored (including any expired
// entries not yet evicted).
func (c *simpleCache[K, V]) Len() int {
	return c.data.Len()
}
// OnExpiration registers fn to be called whenever an item expires. Other
// eviction reasons (e.g. capacity) do not trigger it. The returned function
// unregisters the callback.
func (c *simpleCache[K, V]) OnExpiration(fn func(K, V)) func() {
	return c.data.OnEviction(func(_ context.Context, reason ttlcache.EvictionReason, item *ttlcache.Item[K, V]) {
		if reason == ttlcache.EvictionReasonExpired {
			fn(item.Key(), item.Value())
		}
	})
}

161
utils/cache/simple_cache_test.go vendored Normal file
View File

@@ -0,0 +1,161 @@
package cache
import (
"errors"
"fmt"
"time"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Unit tests for SimpleCache: basic add/get, TTL expiration, loader behavior,
// key/value listing, the SizeLimit and DefaultTTL options, and the
// expiration callback.
var _ = Describe("SimpleCache", func() {
	var (
		cache SimpleCache[string, string]
	)
	BeforeEach(func() {
		cache = NewSimpleCache[string, string]()
	})
	Describe("Add and Get", func() {
		It("should add and retrieve a value", func() {
			err := cache.Add("key", "value")
			Expect(err).NotTo(HaveOccurred())
			value, err := cache.Get("key")
			Expect(err).NotTo(HaveOccurred())
			Expect(value).To(Equal("value"))
		})
	})
	Describe("AddWithTTL and Get", func() {
		It("should add a value with TTL and retrieve it", func() {
			err := cache.AddWithTTL("key", "value", 1*time.Minute)
			Expect(err).NotTo(HaveOccurred())
			value, err := cache.Get("key")
			Expect(err).NotTo(HaveOccurred())
			Expect(value).To(Equal("value"))
		})
		It("should not retrieve a value after its TTL has expired", func() {
			err := cache.AddWithTTL("key", "value", 10*time.Millisecond)
			Expect(err).NotTo(HaveOccurred())
			time.Sleep(50 * time.Millisecond)
			_, err = cache.Get("key")
			Expect(err).To(HaveOccurred())
		})
	})
	Describe("GetWithLoader", func() {
		It("should retrieve a value using the loader function", func() {
			loader := func(key string) (string, time.Duration, error) {
				return fmt.Sprintf("%s=value", key), 1 * time.Minute, nil
			}
			value, err := cache.GetWithLoader("key", loader)
			Expect(err).NotTo(HaveOccurred())
			Expect(value).To(Equal("key=value"))
		})
		It("should return the error returned by the loader function", func() {
			loader := func(key string) (string, time.Duration, error) {
				return "", 0, errors.New("some error")
			}
			_, err := cache.GetWithLoader("key", loader)
			Expect(err).To(HaveOccurred())
		})
	})
	Describe("Keys and Values", func() {
		It("should return all keys and all values", func() {
			err := cache.Add("key1", "value1")
			Expect(err).NotTo(HaveOccurred())
			err = cache.Add("key2", "value2")
			Expect(err).NotTo(HaveOccurred())
			keys := cache.Keys()
			Expect(keys).To(ConsistOf("key1", "key2"))
			values := cache.Values()
			Expect(values).To(ConsistOf("value1", "value2"))
		})
		Context("when there are expired items in the cache", func() {
			It("should not return expired items", func() {
				Expect(cache.Add("key0", "value0")).To(Succeed())
				for i := 1; i <= 3; i++ {
					err := cache.AddWithTTL(fmt.Sprintf("key%d", i), fmt.Sprintf("value%d", i), 10*time.Millisecond)
					Expect(err).NotTo(HaveOccurred())
				}
				time.Sleep(50 * time.Millisecond)
				// Only the item without a short TTL should remain visible
				Expect(cache.Keys()).To(ConsistOf("key0"))
				Expect(cache.Values()).To(ConsistOf("value0"))
			})
		})
	})
	Describe("Options", func() {
		Context("when size limit is set", func() {
			BeforeEach(func() {
				cache = NewSimpleCache[string, string](Options{
					SizeLimit: 2,
				})
			})
			It("should drop the oldest item when the size limit is reached", func() {
				for i := 1; i <= 3; i++ {
					err := cache.Add(fmt.Sprintf("key%d", i), fmt.Sprintf("value%d", i))
					Expect(err).NotTo(HaveOccurred())
				}
				Expect(cache.Keys()).To(ConsistOf("key2", "key3"))
			})
		})
		Context("when default TTL is set", func() {
			BeforeEach(func() {
				cache = NewSimpleCache[string, string](Options{
					DefaultTTL: 10 * time.Millisecond,
				})
			})
			It("should expire items after the default TTL", func() {
				// key0 gets an explicit longer TTL and must survive the sleep
				Expect(cache.AddWithTTL("key0", "value0", 1*time.Minute)).To(Succeed())
				for i := 1; i <= 3; i++ {
					err := cache.Add(fmt.Sprintf("key%d", i), fmt.Sprintf("value%d", i))
					Expect(err).NotTo(HaveOccurred())
				}
				time.Sleep(50 * time.Millisecond)
				for i := 1; i <= 3; i++ {
					_, err := cache.Get(fmt.Sprintf("key%d", i))
					Expect(err).To(HaveOccurred())
				}
				Expect(cache.Get("key0")).To(Equal("value0"))
			})
		})
		Describe("OnExpiration", func() {
			It("should call callback when item expires", func() {
				cache = NewSimpleCache[string, string]()
				expired := make(chan struct{})
				cache.OnExpiration(func(k, v string) { close(expired) })
				Expect(cache.AddWithTTL("key", "value", 10*time.Millisecond)).To(Succeed())
				select {
				case <-expired:
				case <-time.After(100 * time.Millisecond):
					Fail("expiration callback not called")
				}
			})
		})
	})
})

110
utils/cache/spread_fs.go vendored Normal file
View File

@@ -0,0 +1,110 @@
package cache
import (
"crypto/sha1"
"fmt"
"io/fs"
"os"
"path/filepath"
"strings"
"github.com/djherbis/atime"
"github.com/djherbis/fscache"
"github.com/djherbis/stream"
"github.com/navidrome/navidrome/log"
)
// spreadFS is a file system that distributes cached files across a two-level
// directory tree derived from a SHA-1 hash of the cache key (see KeyMapper),
// avoiding very large flat directories. It is used as the backing store for
// djherbis/fscache (see Stat, which returns fscache.FileInfo).
type spreadFS struct {
	root string       // base directory of the cache
	mode os.FileMode  // permission bits used when creating directories
	init func() error // (re)creates the root directory; reused by RemoveAll
}
// NewSpreadFS returns a FileSystem rooted at directory dir. This FS hashes the
// key and distributes all files in a layout like XX/XX/XXXXXXXXXX. Ex:
//
//	Key is abc123.300x300.jpg
//	Hash would be: c574aeb3caafcf93ee337f0cf34e31a428ba3f13
//	File in cache would be: c5 / 74 / c574aeb3caafcf93ee337f0cf34e31a428ba3f13
//
// The idea is to avoid having too many files in one dir, which could
// potentially cause performance issues and may hit limitations depending on
// the OS.
// See discussion here: https://github.com/djherbis/fscache/issues/8#issuecomment-614319323
//
// dir is created with specified mode if it doesn't exist.
func NewSpreadFS(dir string, mode os.FileMode) (*spreadFS, error) {
	mkRoot := func() error {
		return os.MkdirAll(dir, mode)
	}
	sfs := &spreadFS{root: dir, mode: mode, init: mkRoot}
	return sfs, sfs.init()
}
// Reload walks the cache root and invokes f once for every file matching the
// spread layout (XX/XX/<40-char hash>), so a pre-existing cache can be
// re-registered after a restart.
//
// The original cache keys cannot be recovered from the hashed file names, so
// the absolute file path is passed as both key and name. KeyMapper relies on
// this and returns such paths unchanged.
func (sfs *spreadFS) Reload(f func(key string, name string)) error {
	count := 0
	err := filepath.WalkDir(sfs.root, func(absoluteFilePath string, de fs.DirEntry, err error) error {
		if err != nil {
			// Log and keep walking; a single unreadable entry should not abort
			// the whole reload. (The entry will usually fail the format check below.)
			log.Error("Error loading cache", "dir", sfs.root, err)
		}
		path, err := filepath.Rel(sfs.root, absoluteFilePath)
		if err != nil {
			return nil //nolint:nilerr
		}
		// Skip if name is not in the format XX/XX/XXXXXXXXXXXX
		parts := strings.Split(path, string(os.PathSeparator))
		if len(parts) != 3 || len(parts[0]) != 2 || len(parts[1]) != 2 || len(parts[2]) != 40 {
			return nil
		}
		// Key == name, on purpose (see function comment).
		f(absoluteFilePath, absoluteFilePath)
		count++
		return nil
	})
	if err == nil {
		log.Debug("Loaded cache", "dir", sfs.root, "numItems", count)
	}
	return err
}
// Create makes the parent directories for name (using the FS's dir mode) and
// then creates/truncates the file itself. Cached files are always created
// private (0600), regardless of the directory mode.
func (sfs *spreadFS) Create(name string) (stream.File, error) {
	if err := os.MkdirAll(filepath.Dir(name), sfs.mode); err != nil {
		return nil, err
	}
	return os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
}
// Open opens an existing cached file for reading.
func (sfs *spreadFS) Open(name string) (stream.File, error) {
	return os.Open(name)
}

// Remove deletes a single cached file from disk.
func (sfs *spreadFS) Remove(name string) error {
	return os.Remove(name)
}
// Stat returns the file info for name, enriched with its last access time
// (via djherbis/atime) — presumably used by fscache to pick eviction victims;
// confirm against the fscache haunter before relying on it.
func (sfs *spreadFS) Stat(name string) (fscache.FileInfo, error) {
	stat, err := os.Stat(name)
	if err != nil {
		return fscache.FileInfo{}, err
	}
	return fscache.FileInfo{FileInfo: stat, Atime: atime.Get(stat)}, nil
}
// RemoveAll wipes the entire cache directory tree and recreates an empty root.
func (sfs *spreadFS) RemoveAll() error {
	if err := os.RemoveAll(sfs.root); err != nil {
		return err
	}
	return sfs.init()
}
// KeyMapper maps a cache key to its on-disk location: the key is SHA-1
// hashed and spread as root/XX/XX/<full 40-char hex hash>.
//
// When running the Haunter, fscache can call this KeyMapper with the cached
// filepath instead of the key. That's because we don't inform the original
// cache keys when reloading in the Reload function above. Such paths are
// detected by their root prefix and returned untouched, as they are the
// actual mapped keys.
func (sfs *spreadFS) KeyMapper(key string) string {
	if strings.HasPrefix(key, sfs.root) {
		return key
	}
	sum := sha1.Sum([]byte(key))
	name := fmt.Sprintf("%x", sum)
	return filepath.Join(sfs.root, name[0:2], name[2:4], name)
}

69
utils/cache/spread_fs_test.go vendored Normal file
View File

@@ -0,0 +1,69 @@
package cache
import (
"os"
"path/filepath"
"strings"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for spreadFS: key hashing/layout (KeyMapper) and re-registration of
// existing cache files after a restart (Reload).
var _ = Describe("Spread FS", func() {
	var fs *spreadFS
	var rootDir string
	BeforeEach(func() {
		var err error
		rootDir, _ = os.MkdirTemp("", "spread_fs")
		fs, err = NewSpreadFS(rootDir, 0755)
		Expect(err).To(BeNil())
	})
	AfterEach(func() {
		_ = os.RemoveAll(rootDir)
	})
	Describe("KeyMapper", func() {
		It("creates a file with proper name format", func() {
			mapped := fs.KeyMapper("abc")
			Expect(mapped).To(HavePrefix(fs.root))
			mapped = strings.TrimPrefix(mapped, fs.root)
			// The leading separator makes the first split element empty: "", XX, XX, hash
			parts := strings.Split(mapped, string(filepath.Separator))
			Expect(parts).To(HaveLen(4))
			Expect(parts[3]).To(HaveLen(40))
		})
		It("returns the unmodified key if it is a cache file path", func() {
			mapped := fs.KeyMapper("abc")
			Expect(mapped).To(HavePrefix(fs.root))
			Expect(fs.KeyMapper(mapped)).To(Equal(mapped))
		})
	})
	Describe("Reload", func() {
		var files []string
		BeforeEach(func() {
			// Each file's content doubles as its cache key.
			files = []string{"aaaaa", "bbbbb", "ccccc"}
			for _, content := range files {
				file := fs.KeyMapper(content)
				f, err := fs.Create(file)
				Expect(err).To(BeNil())
				_, _ = f.Write([]byte(content))
				_ = f.Close()
			}
		})
		It("loads all files from fs", func() {
			var actual []string
			err := fs.Reload(func(key string, name string) {
				// Reload passes the file path as both key and name.
				Expect(key).To(Equal(name))
				data, err := os.ReadFile(name)
				Expect(err).To(BeNil())
				actual = append(actual, string(data))
			})
			Expect(err).To(BeNil())
			Expect(actual).To(HaveLen(len(files)))
			Expect(actual).To(ContainElements(files[0], files[1], files[2]))
		})
	})
})

34
utils/chrono/meter.go Normal file
View File

@@ -0,0 +1,34 @@
package chrono
import (
"time"
. "github.com/navidrome/navidrome/utils/gg"
)
// Meter is a simple stopwatch that accumulates elapsed time across multiple
// Start/Stop cycles.
type Meter struct {
	elapsed time.Duration // total accumulated by completed Start/Stop cycles
	mark    *time.Time    // start of the current running cycle; nil when stopped
}

// Start begins (or restarts) timing. Calling Start while already running
// simply resets the current cycle's starting point.
func (m *Meter) Start() {
	now := time.Now()
	m.mark = &now
}

// Stop ends the current cycle, folds its duration into the accumulated total,
// and returns that total. Stopping an already-stopped meter is a no-op that
// returns the accumulated total.
func (m *Meter) Stop() time.Duration {
	if m.mark == nil {
		return m.elapsed
	}
	m.elapsed += time.Since(*m.mark)
	m.mark = nil
	return m.elapsed
}

// Elapsed returns the accumulated total plus, when the meter is currently
// running, the time since the last Start. It does not stop the meter.
func (m *Meter) Elapsed() time.Duration {
	total := m.elapsed
	if m.mark != nil {
		total += time.Since(*m.mark)
	}
	return total
}

View File

@@ -0,0 +1,91 @@
package chrono_test
import (
"testing"
"time"
"github.com/navidrome/navidrome/tests"
. "github.com/navidrome/navidrome/utils/chrono"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestChrono boots the Ginkgo suite for this package.
func TestChrono(t *testing.T) {
	tests.Init(t, false)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Chrono Suite")
}
// Note: These tests use longer sleep durations and generous tolerances to avoid flakiness
// due to system scheduling delays. For a more elegant approach in the future, consider
// using Go 1.24's experimental testing/synctest package with GOEXPERIMENT=synctest.
var _ = Describe("Meter", func() {
	var meter *Meter
	BeforeEach(func() {
		meter = &Meter{}
	})
	Describe("Stop", func() {
		It("should return the elapsed time", func() {
			meter.Start()
			time.Sleep(50 * time.Millisecond)
			elapsed := meter.Stop()
			// Use generous tolerance to account for system scheduling delays
			Expect(elapsed).To(BeNumerically(">=", 30*time.Millisecond))
			Expect(elapsed).To(BeNumerically("<=", 200*time.Millisecond))
		})
		It("should accumulate elapsed time on multiple starts and stops", func() {
			// First cycle
			meter.Start()
			time.Sleep(50 * time.Millisecond)
			firstElapsed := meter.Stop()
			// Second cycle
			meter.Start()
			time.Sleep(50 * time.Millisecond)
			totalElapsed := meter.Stop()
			// Test that time accumulates (second measurement should be greater than first)
			Expect(totalElapsed).To(BeNumerically(">", firstElapsed))
			// Test that accumulated time is reasonable (should be roughly double the first)
			Expect(totalElapsed).To(BeNumerically(">=", time.Duration(float64(firstElapsed)*1.5)))
			Expect(totalElapsed).To(BeNumerically("<=", firstElapsed*3))
			// Sanity check: total should be at least 60ms (allowing for some timing variance)
			Expect(totalElapsed).To(BeNumerically(">=", 60*time.Millisecond))
		})
	})
	Describe("Elapsed", func() {
		It("should return the total elapsed time", func() {
			meter.Start()
			time.Sleep(50 * time.Millisecond)
			meter.Stop()
			// Should not count the time the meter was stopped
			time.Sleep(50 * time.Millisecond)
			meter.Start()
			time.Sleep(50 * time.Millisecond)
			meter.Stop()
			elapsed := meter.Elapsed()
			// Should be roughly 100ms (2 x 50ms), but allow for significant variance
			Expect(elapsed).To(BeNumerically(">=", 60*time.Millisecond))
			Expect(elapsed).To(BeNumerically("<=", 300*time.Millisecond))
		})
		It("should include the current running time if started", func() {
			meter.Start()
			time.Sleep(50 * time.Millisecond)
			elapsed := meter.Elapsed()
			// Use generous tolerance to account for system scheduling delays
			Expect(elapsed).To(BeNumerically(">=", 30*time.Millisecond))
			Expect(elapsed).To(BeNumerically("<=", 200*time.Millisecond))
		})
	})
})

12
utils/context.go Normal file
View File

@@ -0,0 +1,12 @@
package utils
import "context"
func IsCtxDone(ctx context.Context) bool {
select {
case <-ctx.Done():
return true
default:
return false
}
}

23
utils/context_test.go Normal file
View File

@@ -0,0 +1,23 @@
package utils_test
import (
"context"
"github.com/navidrome/navidrome/utils"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for IsCtxDone: a cancelable context before and after cancellation.
var _ = Describe("IsCtxDone", func() {
	It("returns false if the context is not done", func() {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()
		Expect(utils.IsCtxDone(ctx)).To(BeFalse())
	})
	It("returns true if the context is done", func() {
		ctx, cancel := context.WithCancel(context.Background())
		cancel()
		Expect(utils.IsCtxDone(ctx)).To(BeTrue())
	})
})

72
utils/encrypt.go Normal file
View File

@@ -0,0 +1,72 @@
package utils
import (
"context"
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/base64"
"errors"
"io"
"github.com/navidrome/navidrome/log"
)
// Encrypt encrypts data with AES-GCM using encKey (which must be a valid AES
// key length: 16, 24 or 32 bytes). The random nonce is prepended to the
// ciphertext and the whole payload is returned base64-encoded, ready to be
// passed to Decrypt.
func Encrypt(ctx context.Context, encKey []byte, data string) (string, error) {
	plaintext := []byte(data)
	block, err := aes.NewCipher(encKey)
	if err != nil {
		log.Error(ctx, "Could not create a cipher", err)
		return "", err
	}
	aesGCM, err := cipher.NewGCM(block)
	if err != nil {
		// Fixed: the error was previously logged under a stray "user" key.
		log.Error(ctx, "Could not create a GCM", err)
		return "", err
	}
	nonce := make([]byte, aesGCM.NonceSize())
	if _, err = io.ReadFull(rand.Reader, nonce); err != nil {
		log.Error(ctx, "Could not generate nonce", err)
		return "", err
	}
	// Seal appends the ciphertext to the nonce, so the nonce travels with the data.
	ciphertext := aesGCM.Seal(nonce, nonce, plaintext, nil)
	return base64.StdEncoding.EncodeToString(ciphertext), nil
}
// Decrypt reverses Encrypt: it base64-decodes encData, splits off the GCM
// nonce and decrypts/authenticates the remainder with AES-GCM using encKey.
func Decrypt(ctx context.Context, encKey []byte, encData string) (value string, err error) {
	// Safety net: turn any unexpected panic into an error instead of crashing.
	defer func() {
		if r := recover(); r != nil {
			err = errors.New("decryption panicked")
		}
	}()
	// Fixed: the decode error was previously discarded, letting garbage flow
	// into the slicing below.
	enc, err := base64.StdEncoding.DecodeString(encData)
	if err != nil {
		log.Error(ctx, "Could not decode base64 data", err)
		return "", err
	}
	block, err := aes.NewCipher(encKey)
	if err != nil {
		log.Error(ctx, "Could not create a cipher", err)
		return "", err
	}
	aesGCM, err := cipher.NewGCM(block)
	if err != nil {
		log.Error(ctx, "Could not create a GCM", err)
		return "", err
	}
	nonceSize := aesGCM.NonceSize()
	// Fixed: explicitly reject payloads shorter than the nonce instead of
	// relying on the recover above to catch the slice-bounds panic.
	if len(enc) < nonceSize {
		return "", errors.New("invalid encrypted data: shorter than nonce")
	}
	nonce, ciphertext := enc[:nonceSize], enc[nonceSize:]
	plaintext, err := aesGCM.Open(nil, nonce, ciphertext, nil)
	if err != nil {
		log.Error(ctx, "Could not decrypt password", err)
		return "", err
	}
	return string(plaintext), nil
}

38
utils/encrypt_test.go Normal file
View File

@@ -0,0 +1,38 @@
package utils
import (
"context"
"crypto/sha256"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Round-trip specs for Encrypt/Decrypt using 32-byte (SHA-256-derived) AES keys.
var _ = Describe("encrypt", func() {
	It("decrypts correctly when using the same encryption key", func() {
		sum := sha256.Sum256([]byte("password"))
		encKey := sum[0:]
		data := "Can you keep a secret?"
		encrypted, err := Encrypt(context.Background(), encKey, data)
		Expect(err).ToNot(HaveOccurred())
		decrypted, err := Decrypt(context.Background(), encKey, encrypted)
		Expect(err).ToNot(HaveOccurred())
		Expect(decrypted).To(Equal(data))
	})
	It("fails to decrypt if not using the same encryption key", func() {
		sum := sha256.Sum256([]byte("password"))
		encKey := sum[0:]
		data := "Can you keep a secret?"
		encrypted, err := Encrypt(context.Background(), encKey, data)
		Expect(err).ToNot(HaveOccurred())
		// GCM authentication must fail with a different key.
		sum = sha256.Sum256([]byte("different password"))
		encKey = sum[0:]
		_, err = Decrypt(context.Background(), encKey, encrypted)
		Expect(err).To(MatchError("cipher: message authentication failed"))
	})
})

25
utils/files.go Normal file
View File

@@ -0,0 +1,25 @@
package utils
import (
"os"
"path"
"path/filepath"
"strings"
"github.com/navidrome/navidrome/model/id"
)
// TempFileName returns a unique path inside the OS temp dir, composed of
// prefix + random ID + suffix. It only builds the name; no file is created.
func TempFileName(prefix, suffix string) string {
	return filepath.Join(os.TempDir(), prefix+id.NewRandom()+suffix)
}
// BaseName returns the last element of filePath with its extension removed.
// Hidden files with no extra extension (e.g. ".hidden") yield an empty string,
// since path.Ext treats the whole name as the extension.
func BaseName(filePath string) string {
	base := path.Base(filePath)
	return strings.TrimSuffix(base, path.Ext(base))
}
// FileExists checks if a file or directory exists
func FileExists(path string) bool {
_, err := os.Stat(path)
return err == nil || !os.IsNotExist(err)
}

178
utils/files_test.go Normal file
View File

@@ -0,0 +1,178 @@
package utils_test
import (
"os"
"path/filepath"
"strings"
"github.com/navidrome/navidrome/utils"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for TempFileName: names live under os.TempDir() and are unique per call.
var _ = Describe("TempFileName", func() {
	It("creates a temporary file name with prefix and suffix", func() {
		prefix := "test-"
		suffix := ".tmp"
		result := utils.TempFileName(prefix, suffix)
		Expect(result).To(ContainSubstring(prefix))
		Expect(result).To(HaveSuffix(suffix))
		Expect(result).To(ContainSubstring(os.TempDir()))
	})
	It("creates unique file names on multiple calls", func() {
		prefix := "unique-"
		suffix := ".test"
		result1 := utils.TempFileName(prefix, suffix)
		result2 := utils.TempFileName(prefix, suffix)
		Expect(result1).NotTo(Equal(result2))
	})
	It("handles empty prefix and suffix", func() {
		result := utils.TempFileName("", "")
		Expect(result).To(ContainSubstring(os.TempDir()))
		// The random ID still makes the name longer than the dir alone.
		Expect(len(result)).To(BeNumerically(">", len(os.TempDir())))
	})
	It("creates proper file path separators", func() {
		prefix := "path-test-"
		suffix := ".ext"
		result := utils.TempFileName(prefix, suffix)
		expectedDir := os.TempDir()
		Expect(result).To(HavePrefix(expectedDir))
		Expect(strings.Count(result, string(filepath.Separator))).To(BeNumerically(">=", strings.Count(expectedDir, string(filepath.Separator))))
	})
})
// Specs for BaseName, including edge cases around hidden files and empty input.
var _ = Describe("BaseName", func() {
	It("extracts basename from a simple filename", func() {
		result := utils.BaseName("test.mp3")
		Expect(result).To(Equal("test"))
	})
	It("extracts basename from a file path", func() {
		result := utils.BaseName("/path/to/file.txt")
		Expect(result).To(Equal("file"))
	})
	It("handles files without extension", func() {
		result := utils.BaseName("/path/to/filename")
		Expect(result).To(Equal("filename"))
	})
	It("handles files with multiple dots", func() {
		result := utils.BaseName("archive.tar.gz")
		Expect(result).To(Equal("archive.tar"))
	})
	It("handles hidden files", func() {
		// For hidden files without additional extension, path.Ext returns the entire name
		// So basename becomes empty string after TrimSuffix
		result := utils.BaseName(".hidden")
		Expect(result).To(Equal(""))
	})
	It("handles hidden files with extension", func() {
		result := utils.BaseName(".config.json")
		Expect(result).To(Equal(".config"))
	})
	It("handles empty string", func() {
		// The actual behavior returns empty string for empty input
		result := utils.BaseName("")
		Expect(result).To(Equal(""))
	})
	It("handles path ending with separator", func() {
		result := utils.BaseName("/path/to/dir/")
		Expect(result).To(Equal("dir"))
	})
	It("handles complex nested path", func() {
		result := utils.BaseName("/very/long/path/to/my/favorite/song.mp3")
		Expect(result).To(Equal("song"))
	})
})
// Specs for FileExists covering files, directories, deletions and odd paths.
var _ = Describe("FileExists", func() {
	var tempFile *os.File
	var tempDir string
	BeforeEach(func() {
		var err error
		tempFile, err = os.CreateTemp("", "fileexists-test-*.txt")
		Expect(err).NotTo(HaveOccurred())
		tempDir, err = os.MkdirTemp("", "fileexists-test-dir-*")
		Expect(err).NotTo(HaveOccurred())
	})
	AfterEach(func() {
		if tempFile != nil {
			os.Remove(tempFile.Name())
			tempFile.Close()
		}
		if tempDir != "" {
			os.RemoveAll(tempDir)
		}
	})
	It("returns true for existing file", func() {
		Expect(utils.FileExists(tempFile.Name())).To(BeTrue())
	})
	It("returns true for existing directory", func() {
		Expect(utils.FileExists(tempDir)).To(BeTrue())
	})
	It("returns false for non-existing file", func() {
		nonExistentPath := filepath.Join(tempDir, "does-not-exist.txt")
		Expect(utils.FileExists(nonExistentPath)).To(BeFalse())
	})
	It("returns false for empty path", func() {
		Expect(utils.FileExists("")).To(BeFalse())
	})
	It("handles nested non-existing path", func() {
		nonExistentPath := "/this/path/definitely/does/not/exist/file.txt"
		Expect(utils.FileExists(nonExistentPath)).To(BeFalse())
	})
	Context("when file is deleted after creation", func() {
		It("returns false after file deletion", func() {
			filePath := tempFile.Name()
			Expect(utils.FileExists(filePath)).To(BeTrue())
			err := os.Remove(filePath)
			Expect(err).NotTo(HaveOccurred())
			tempFile = nil // Prevent cleanup attempt
			Expect(utils.FileExists(filePath)).To(BeFalse())
		})
	})
	Context("when directory is deleted after creation", func() {
		It("returns false after directory deletion", func() {
			dirPath := tempDir
			Expect(utils.FileExists(dirPath)).To(BeTrue())
			err := os.RemoveAll(dirPath)
			Expect(err).NotTo(HaveOccurred())
			tempDir = "" // Prevent cleanup attempt
			Expect(utils.FileExists(dirPath)).To(BeFalse())
		})
	})
	It("handles permission denied scenarios gracefully", func() {
		// This test might be platform specific, but we test the general case
		result := utils.FileExists("/root/.ssh/id_rsa") // Likely to not exist or be inaccessible
		Expect(result).To(Or(BeTrue(), BeFalse())) // Should not panic
	})
})

23
utils/gg/gg.go Normal file
View File

@@ -0,0 +1,23 @@
// Package gg implements simple "extensions" to Go language. Based on https://github.com/icza/gog
package gg
// P returns a pointer to a copy of the input value.
func P[T any](v T) *T {
	ptr := new(T)
	*ptr = v
	return ptr
}
// V dereferences the input pointer, or returns T's zero value when it is nil.
func V[T any](p *T) T {
	var result T
	if p != nil {
		result = *p
	}
	return result
}
// If emulates a ternary operator: it yields v1 when cond is true, otherwise
// v2. Note that both v1 and v2 are evaluated at the call site regardless of cond.
func If[T any](cond bool, v1, v2 T) T {
	if !cond {
		return v2
	}
	return v1
}

62
utils/gg/gg_test.go Normal file
View File

@@ -0,0 +1,62 @@
package gg_test
import (
"testing"
"github.com/navidrome/navidrome/tests"
"github.com/navidrome/navidrome/utils/gg"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestGG boots the Ginkgo suite for this package.
func TestGG(t *testing.T) {
	tests.Init(t, false)
	RegisterFailHandler(Fail)
	RunSpecs(t, "GG Suite")
}
// Specs for the gg helpers P, V and If.
var _ = Describe("GG", func() {
	Describe("P", func() {
		It("returns a pointer to the input value", func() {
			v := 123
			Expect(gg.P(123)).To(Equal(&v))
		})
		It("returns nil if the input value is zero", func() {
			// Note: despite the description, P always returns a non-nil
			// pointer; this asserts a pointer to the zero value.
			v := 0
			Expect(gg.P(0)).To(Equal(&v))
		})
	})
	Describe("V", func() {
		It("returns the value of the input pointer", func() {
			v := 123
			Expect(gg.V(&v)).To(Equal(123))
		})
		It("returns a zero value if the input pointer is nil", func() {
			var v *int
			Expect(gg.V(v)).To(Equal(0))
		})
	})
	Describe("If", func() {
		It("returns the first value if the condition is true", func() {
			Expect(gg.If(true, 1, 2)).To(Equal(1))
		})
		It("returns the second value if the condition is false", func() {
			Expect(gg.If(false, 1, 2)).To(Equal(2))
		})
		It("works with string values", func() {
			Expect(gg.If(true, "a", "b")).To(Equal("a"))
			Expect(gg.If(false, "a", "b")).To(Equal("b"))
		})
		It("works with different types", func() {
			Expect(gg.If(true, 1.1, 2.2)).To(Equal(1.1))
			Expect(gg.If(false, 1.1, 2.2)).To(Equal(2.2))
		})
	})
})

View File

@@ -0,0 +1,23 @@
package gravatar
import (
"crypto/sha256"
"fmt"
"strings"
)
// Endpoint and size bounds for generated Gravatar URLs.
const baseUrl = "https://www.gravatar.com/avatar"
const defaultSize = 80
const maxSize = 2048

// Url builds the Gravatar avatar URL for the given email address. The address
// is normalized (lower-cased and trimmed) before hashing with SHA-256, and
// size is clamped: values below 1 fall back to defaultSize, values above
// maxSize are capped.
func Url(email string, size int) string {
	normalized := strings.TrimSpace(strings.ToLower(email))
	digest := sha256.Sum256([]byte(normalized))
	if size < 1 {
		size = defaultSize
	}
	if size > maxSize {
		size = maxSize
	}
	return fmt.Sprintf("%s/%x?s=%d", baseUrl, digest, size)
}

View File

@@ -0,0 +1,34 @@
package gravatar_test
import (
"testing"
"github.com/navidrome/navidrome/tests"
"github.com/navidrome/navidrome/utils/gravatar"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestGravatar boots the Ginkgo suite for this package.
func TestGravatar(t *testing.T) {
	tests.Init(t, false)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Gravatar Test Suite")
}
// Specs for gravatar.Url: formatting, size defaulting/capping and normalization.
var _ = Describe("Gravatar", func() {
	It("returns a well formatted gravatar URL", func() {
		Expect(gravatar.Url("my@email.com", 100)).To(Equal("https://www.gravatar.com/avatar/cf3d8259741b19a2b09e17d4fa9a97c63adc44bf2a5fa075cdcb5491f525feaa?s=100"))
	})
	It("sets the default size", func() {
		Expect(gravatar.Url("my@email.com", 0)).To(Equal("https://www.gravatar.com/avatar/cf3d8259741b19a2b09e17d4fa9a97c63adc44bf2a5fa075cdcb5491f525feaa?s=80"))
	})
	It("caps maximum size", func() {
		Expect(gravatar.Url("my@email.com", 3000)).To(Equal("https://www.gravatar.com/avatar/cf3d8259741b19a2b09e17d4fa9a97c63adc44bf2a5fa075cdcb5491f525feaa?s=2048"))
	})
	It("ignores case", func() {
		Expect(gravatar.Url("MY@email.com", 0)).To(Equal(gravatar.Url("my@email.com", 0)))
	})
	It("ignores spaces", func() {
		Expect(gravatar.Url(" my@email.com ", 0)).To(Equal(gravatar.Url("my@email.com", 0)))
	})
})

74
utils/hasher/hasher.go Normal file
View File

@@ -0,0 +1,74 @@
package hasher
import (
"hash/maphash"
"strconv"
"sync"
"github.com/navidrome/navidrome/utils/random"
)
// instance is the package-level Hasher used by the convenience functions below.
var instance = NewHasher()

// Reseed generates a new random seed for the given id on the default Hasher.
func Reseed(id string) {
	instance.Reseed(id)
}

// SetSeed sets a specific seed for the given id on the default Hasher.
func SetSeed(id string, seed string) {
	instance.SetSeed(id, seed)
}

// CurrentSeed returns the seed currently associated with id on the default
// Hasher (empty string if none was set).
func CurrentSeed(id string) string {
	instance.mutex.RLock()
	defer instance.mutex.RUnlock()
	return instance.seeds[id]
}

// HashFunc returns the hash function of the default Hasher.
func HashFunc() func(id, str string) uint64 {
	return instance.HashFunc()
}
// Hasher computes per-id hashes of strings, where each id has its own
// (re)settable seed. Reseeding one id reshuffles its hash values without
// affecting any other id.
type Hasher struct {
	seeds    map[string]string // per-id seed, prepended to the hashed string
	mutex    sync.RWMutex      // guards seeds
	hashSeed maphash.Seed      // process-wide seed for maphash
}
// NewHasher creates a Hasher with an empty seed table and a fresh maphash seed.
func NewHasher() *Hasher {
	return &Hasher{
		seeds:    make(map[string]string),
		hashSeed: maphash.MakeSeed(),
	}
}
// SetSeed sets a seed for the given id. Safe for concurrent use.
func (h *Hasher) SetSeed(id string, seed string) {
	h.mutex.Lock()
	defer h.mutex.Unlock()
	h.seeds[id] = seed
}
// Reseed generates a new random seed for the given id
func (h *Hasher) Reseed(id string) {
	_ = h.reseed(id)
}

// reseed creates a random base-36 seed, stores it for id, and returns it.
func (h *Hasher) reseed(id string) string {
	seed := strconv.FormatUint(random.Uint64(), 36)
	h.SetSeed(id, seed)
	return seed
}
// HashFunc returns a function that hashes a string using the seed for the
// given id, lazily generating a random seed on first use of an id.
func (h *Hasher) HashFunc() func(id, str string) uint64 {
	return func(id, str string) uint64 {
		h.mutex.RLock()
		seed, ok := h.seeds[id]
		h.mutex.RUnlock()
		if !ok {
			// NOTE(review): two goroutines hitting a brand-new id concurrently
			// may each generate a seed; the last SetSeed wins, so the very
			// first hashes for an id are not guaranteed to agree. Benign for
			// current usage, but worth confirming.
			seed = h.reseed(id)
		}
		return maphash.Bytes(h.hashSeed, []byte(seed+str))
	}
}

View File

@@ -0,0 +1,68 @@
package hasher_test
import (
	"strconv"
	"sync"
	"testing"

	"github.com/navidrome/navidrome/utils/hasher"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)
// TestHasher boots the Ginkgo suite for this package.
func TestHasher(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Hasher Suite")
}
// Specs for hasher.HashFunc: stability per (id, seed), independence between
// ids, and basic concurrent safety.
var _ = Describe("HashFunc", func() {
	const input = "123e4567e89b12d3a456426614174000"
	It("hashes the input and returns the sum", func() {
		hashFunc := hasher.HashFunc()
		sum := hashFunc("1", input)
		Expect(sum > 0).To(BeTrue())
	})
	It("hashes the input, reseeds and returns a different sum", func() {
		hashFunc := hasher.HashFunc()
		sum := hashFunc("1", input)
		hasher.Reseed("1")
		sum2 := hashFunc("1", input)
		Expect(sum).NotTo(Equal(sum2))
	})
	It("keeps different hashes for different ids", func() {
		hashFunc := hasher.HashFunc()
		sum := hashFunc("1", input)
		sum2 := hashFunc("2", input)
		Expect(sum).NotTo(Equal(sum2))
		Expect(sum).To(Equal(hashFunc("1", input)))
		Expect(sum2).To(Equal(hashFunc("2", input)))
	})
	It("keeps the same hash for the same id and seed", func() {
		id := "1"
		hashFunc := hasher.HashFunc()
		hasher.SetSeed(id, "original_seed")
		sum := hashFunc(id, input)
		Expect(sum).To(Equal(hashFunc(id, input)))
		hasher.Reseed(id)
		Expect(sum).NotTo(Equal(hashFunc(id, input)))
		hasher.SetSeed(id, "original_seed")
		Expect(sum).To(Equal(hashFunc(id, input)))
	})
	It("does not cause race conditions", func() {
		// Wait for all goroutines so no assertion can fire after the spec
		// finishes, and use GinkgoRecover so failures inside goroutines are
		// reported instead of crashing the suite. The loop variable is passed
		// as an argument to avoid capture issues on pre-1.22 toolchains.
		var wg sync.WaitGroup
		for i := 0; i < 1000; i++ {
			wg.Add(1)
			go func(i int) {
				defer GinkgoRecover()
				defer wg.Done()
				hashFunc := hasher.HashFunc()
				sum := hashFunc(strconv.Itoa(i), input)
				Expect(sum).ToNot(BeZero())
			}(i)
		}
		wg.Wait()
	})
})

View File

@@ -0,0 +1,38 @@
package utils
import (
"regexp"
"strings"
)
// IndexGroups maps each index character to the display name of the index
// entry that covers it (e.g. with spec "A-C(ABC)", "B" maps to "A-C").
type IndexGroups map[string]string

// indexGroupsRx matches an entry with an explicit character group, such as
// "A-E(ABCDE)": capture 1 is the display name, capture 2 the covered characters.
var indexGroupsRx = regexp.MustCompile(`(.+)\((.+)\)`)

// ParseIndexGroups parses an index specification.
//
// The specification is a space-separated list of index entries. Normally, each
// entry is just a single character, but you may also specify multiple
// characters. For instance, the entry "The" will link to all files and
// folders starting with "The".
//
// You may also create an entry using a group of index characters in
// parentheses. For instance, the entry "A-E(ABCDE)" will display as "A-E" and
// link to all files and folders starting with either A, B, C, D or E. This may
// be useful for grouping less-frequently used characters (such as X, Y and Z),
// or for grouping accented characters (such as A, À and Á).
//
// Files and folders that are not covered by an index entry will be placed
// under the index entry "#".
func ParseIndexGroups(spec string) IndexGroups {
	parsed := make(IndexGroups)
	// strings.Fields (instead of Split on a single space) ignores leading,
	// trailing and repeated whitespace, so a malformed or empty spec no longer
	// produces a bogus "" entry.
	for _, g := range strings.Fields(spec) {
		sub := indexGroupsRx.FindStringSubmatch(g)
		if len(sub) > 0 {
			// Iterate runes, not bytes, so multi-byte characters map correctly.
			for _, c := range sub[2] {
				parsed[string(c)] = sub[1]
			}
		} else {
			parsed[g] = g
		}
	}
	return parsed
}

View File

@@ -0,0 +1,38 @@
package utils
import (
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for ParseIndexGroups: simple entries, grouped entries, and UTF-8 groups.
var _ = Describe("ParseIndexGroup", func() {
	Context("Two simple entries", func() {
		It("returns the entries", func() {
			parsed := ParseIndexGroups("A The")
			Expect(parsed).To(HaveLen(2))
			Expect(parsed["A"]).To(Equal("A"))
			Expect(parsed["The"]).To(Equal("The"))
		})
	})
	Context("An entry with a group", func() {
		// NOTE(review): parsed is computed at spec-tree construction time, not
		// inside BeforeEach/It; this works because ParseIndexGroups is pure.
		parsed := ParseIndexGroups("A-C(ABC) Z")
		It("parses the groups correctly", func() {
			Expect(parsed).To(HaveLen(4))
			Expect(parsed["A"]).To(Equal("A-C"))
			Expect(parsed["B"]).To(Equal("A-C"))
			Expect(parsed["C"]).To(Equal("A-C"))
			Expect(parsed["Z"]).To(Equal("Z"))
		})
	})
	Context("Correctly parses UTF-8", func() {
		parsed := ParseIndexGroups("UTF8(宇A海)")
		It("parses the groups correctly", func() {
			Expect(parsed).To(HaveLen(3))
			Expect(parsed["宇"]).To(Equal("UTF8"))
			Expect(parsed["A"]).To(Equal("UTF8"))
			Expect(parsed["海"]).To(Equal("UTF8"))
		})
	})
})

33
utils/ioutils/ioutils.go Normal file
View File

@@ -0,0 +1,33 @@
package ioutils
import (
"io"
"os"
"golang.org/x/text/encoding/unicode"
"golang.org/x/text/transform"
)
// UTF8Reader wraps an io.Reader to handle Byte Order Mark (BOM) properly.
// It strips UTF-8 BOM if present, and converts UTF-16 (LE/BE) to UTF-8.
// Input without any BOM passes through unchanged (BOMOverride falls back to
// the plain UTF-8 decoder).
// This is particularly useful for reading user-provided text files (like LRC lyrics,
// playlists) that may have been created on Windows, which often adds BOM markers.
//
// Reference: https://en.wikipedia.org/wiki/Byte_order_mark
func UTF8Reader(r io.Reader) io.Reader {
	return transform.NewReader(r, unicode.BOMOverride(unicode.UTF8.NewDecoder()))
}
// UTF8ReadFile reads the named file and returns its contents as a byte slice,
// automatically handling BOM markers. It's similar to os.ReadFile but strips
// UTF-8 BOM and converts UTF-16 encoded files to UTF-8.
func UTF8ReadFile(filename string) ([]byte, error) {
	f, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return io.ReadAll(UTF8Reader(f))
}

View File

@@ -0,0 +1,117 @@
package ioutils
import (
"bytes"
"io"
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestIOUtils boots the Ginkgo suite for this package.
func TestIOUtils(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "IO Utils Suite")
}
// Specs for UTF8Reader: BOM stripping and UTF-16 to UTF-8 conversion.
var _ = Describe("UTF8Reader", func() {
	Context("when reading text with UTF-8 BOM", func() {
		It("strips the UTF-8 BOM marker", func() {
			// UTF-8 BOM is EF BB BF
			input := []byte{0xEF, 0xBB, 0xBF, 'h', 'e', 'l', 'l', 'o'}
			reader := UTF8Reader(bytes.NewReader(input))
			output, err := io.ReadAll(reader)
			Expect(err).ToNot(HaveOccurred())
			Expect(string(output)).To(Equal("hello"))
		})
		It("strips UTF-8 BOM from multi-line text", func() {
			// Test with the actual LRC file format
			input := []byte{0xEF, 0xBB, 0xBF, '[', '0', '0', ':', '0', '0', '.', '0', '0', ']', ' ', 't', 'e', 's', 't'}
			reader := UTF8Reader(bytes.NewReader(input))
			output, err := io.ReadAll(reader)
			Expect(err).ToNot(HaveOccurred())
			Expect(string(output)).To(Equal("[00:00.00] test"))
		})
	})
	Context("when reading text without BOM", func() {
		It("passes through unchanged", func() {
			input := []byte("hello world")
			reader := UTF8Reader(bytes.NewReader(input))
			output, err := io.ReadAll(reader)
			Expect(err).ToNot(HaveOccurred())
			Expect(string(output)).To(Equal("hello world"))
		})
	})
	Context("when reading UTF-16 LE encoded text", func() {
		It("converts to UTF-8 and strips BOM", func() {
			// UTF-16 LE BOM (FF FE) followed by "hi" in UTF-16 LE
			input := []byte{0xFF, 0xFE, 'h', 0x00, 'i', 0x00}
			reader := UTF8Reader(bytes.NewReader(input))
			output, err := io.ReadAll(reader)
			Expect(err).ToNot(HaveOccurred())
			Expect(string(output)).To(Equal("hi"))
		})
	})
	Context("when reading UTF-16 BE encoded text", func() {
		It("converts to UTF-8 and strips BOM", func() {
			// UTF-16 BE BOM (FE FF) followed by "hi" in UTF-16 BE
			input := []byte{0xFE, 0xFF, 0x00, 'h', 0x00, 'i'}
			reader := UTF8Reader(bytes.NewReader(input))
			output, err := io.ReadAll(reader)
			Expect(err).ToNot(HaveOccurred())
			Expect(string(output)).To(Equal("hi"))
		})
	})
	Context("when reading empty content", func() {
		It("returns empty string", func() {
			reader := UTF8Reader(bytes.NewReader([]byte{}))
			output, err := io.ReadAll(reader)
			Expect(err).ToNot(HaveOccurred())
			Expect(string(output)).To(Equal(""))
		})
	})
})
// Specs for UTF8ReadFile using on-disk fixtures with and without BOM.
var _ = Describe("UTF8ReadFile", func() {
	Context("when reading a file with UTF-8 BOM", func() {
		It("strips the BOM marker", func() {
			// Use the actual fixture from issue #4631
			contents, err := UTF8ReadFile("../../tests/fixtures/bom-test.lrc")
			Expect(err).ToNot(HaveOccurred())
			// Should NOT start with BOM
			Expect(contents[0]).ToNot(Equal(byte(0xEF)))
			// Should start with '['
			Expect(contents[0]).To(Equal(byte('[')))
			Expect(string(contents)).To(HavePrefix("[00:00.00]"))
		})
	})
	Context("when reading a file without BOM", func() {
		It("reads the file normally", func() {
			contents, err := UTF8ReadFile("../../tests/fixtures/test.lrc")
			Expect(err).ToNot(HaveOccurred())
			// Should contain the expected content
			Expect(string(contents)).To(ContainSubstring("We're no strangers to love"))
		})
	})
	Context("when reading a non-existent file", func() {
		It("returns an error", func() {
			_, err := UTF8ReadFile("../../tests/fixtures/nonexistent.lrc")
			Expect(err).To(HaveOccurred())
		})
	})
})

26
utils/limiter.go Normal file
View File

@@ -0,0 +1,26 @@
package utils
import (
"cmp"
"sync"
"time"
"golang.org/x/time/rate"
)
// Limiter is a rate limiter that allows a function to be executed at most once
// per ID and per interval. The zero value is usable: Interval defaults to one
// minute.
type Limiter struct {
	Interval time.Duration // minimum time between executions for the same id
	sm       sync.Map      // id -> *rate.Sometimes
}

// Do executes the provided function `f` if the rate limiter for the given `id`
// allows it. It uses the interval specified in the Limiter struct or defaults
// to 1 minute if not set. Entries are never removed from the internal map, so
// the set of ids is expected to be bounded.
func (m *Limiter) Do(id string, f func()) {
	// cmp.Or picks the first non-zero value.
	interval := cmp.Or(
		m.Interval,
		time.Minute, // Default every 1 minute
	)
	limiter, _ := m.sm.LoadOrStore(id, &rate.Sometimes{Interval: interval})
	limiter.(*rate.Sometimes).Do(f)
}

100
utils/merge/merge_fs.go Normal file
View File

@@ -0,0 +1,100 @@
package merge
import (
"cmp"
"errors"
"io"
"io/fs"
"maps"
"slices"
)
// FS implements a simple merged fs.FS, that can combine a Base FS with an Overlay FS. The semantics are:
// - Files from the Overlay FS will override files with the same name in the Base FS
// - Directories are combined, with priority for the Overlay FS over the Base FS for files with matching names
type FS struct {
Base fs.FS
Overlay fs.FS
}
func (m FS) Open(name string) (fs.File, error) {
file, err := m.Overlay.Open(name)
if err != nil {
return m.Base.Open(name)
}
info, err := file.Stat()
if err != nil {
_ = file.Close()
return nil, err
}
overlayDirFile, ok := file.(fs.ReadDirFile)
if !info.IsDir() || !ok {
return file, nil
}
baseDir, _ := m.Base.Open(name)
defer func() {
_ = baseDir.Close()
_ = file.Close()
}()
baseDirFile, ok := baseDir.(fs.ReadDirFile)
if !ok {
return nil, fs.ErrInvalid
}
return m.mergeDirs(name, info, baseDirFile, overlayDirFile)
}
func (m FS) mergeDirs(name string, info fs.FileInfo, baseDir fs.ReadDirFile, overlayDir fs.ReadDirFile) (fs.File, error) {
baseFiles, err := baseDir.ReadDir(-1)
if err != nil {
return nil, err
}
overlayFiles, err := overlayDir.ReadDir(-1)
if err != nil {
overlayFiles = nil
}
merged := map[string]fs.DirEntry{}
for _, f := range baseFiles {
merged[f.Name()] = f
}
for _, f := range overlayFiles {
merged[f.Name()] = f
}
it := maps.Values(merged)
entries := slices.SortedFunc(it, func(i, j fs.DirEntry) int { return cmp.Compare(i.Name(), j.Name()) })
return &mergedDir{
name: name,
info: info,
entries: entries,
}, nil
}
type mergedDir struct {
name string
info fs.FileInfo
entries []fs.DirEntry
pos int
}
var _ fs.ReadDirFile = (*mergedDir)(nil)
// ReadDir implements fs.ReadDirFile over the pre-merged entries, following
// the standard contract: count <= 0 returns all remaining entries with a
// nil error, while count > 0 returns at most count entries and io.EOF once
// the listing is exhausted.
func (d *mergedDir) ReadDir(count int) ([]fs.DirEntry, error) {
	if d.pos >= len(d.entries) && count > 0 {
		return nil, io.EOF
	}
	// Clamp count to what is left; count <= 0 means "everything remaining".
	if count <= 0 || count > len(d.entries)-d.pos {
		count = len(d.entries) - d.pos
	}
	entries := d.entries[d.pos : d.pos+count]
	d.pos += count
	return entries, nil
}
// Close is a no-op: mergedDir holds no OS resources.
func (d *mergedDir) Close() error { return nil }

// Stat returns the FileInfo captured when the directory was opened.
func (d *mergedDir) Stat() (fs.FileInfo, error) { return d.info, nil }

// Read always fails: directories have no byte content.
func (d *mergedDir) Read([]byte) (int, error) {
	return 0, &fs.PathError{Op: "read", Path: d.name, Err: errors.New("is a directory")}
}

View File

@@ -0,0 +1,117 @@
package merge_test
import (
"io"
"io/fs"
"os"
"path/filepath"
"testing"
"github.com/navidrome/navidrome/utils/merge"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestMergeFS hooks the Ginkgo suite below into `go test`.
func TestMergeFS(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "MergeFS Suite")
}
// Exercises merge.FS over two temp directories acting as Base and Overlay.
var _ = Describe("FS", func() {
	var baseName, overlayName string
	var mergedDir fs.FS
	BeforeEach(func() {
		baseName, _ = os.MkdirTemp("", "merge_fs_base_test")
		overlayName, _ = os.MkdirTemp("", "merge_fs_overlay_test")
		baseDir := os.DirFS(baseName)
		overlayDir := os.DirFS(overlayName)
		mergedDir = merge.FS{Base: baseDir, Overlay: overlayDir}
	})
	AfterEach(func() {
		_ = os.RemoveAll(baseName)
		_ = os.RemoveAll(overlayName)
	})
	It("reads from Base dir if not found in Overlay", func() {
		_f(baseName, "a.json")
		file, err := mergedDir.Open("a.json")
		Expect(err).To(BeNil())
		stat, err := file.Stat()
		Expect(err).To(BeNil())
		Expect(stat.Name()).To(Equal("a.json"))
	})
	It("reads overridden file", func() {
		_f(baseName, "b.json", "original")
		// BUG FIX: the overriding copy must be written to the Overlay dir.
		// Both copies were previously written to Base, so the test passed
		// without ever exercising the overlay precedence it claims to test.
		_f(overlayName, "b.json", "overridden")
		file, err := mergedDir.Open("b.json")
		Expect(err).To(BeNil())
		content, err := io.ReadAll(file)
		Expect(err).To(BeNil())
		Expect(string(content)).To(Equal("overridden"))
	})
	It("reads only files from Base if Overlay is empty", func() {
		_f(baseName, "test.txt")
		dir, err := mergedDir.Open(".")
		Expect(err).To(BeNil())
		list, err := dir.(fs.ReadDirFile).ReadDir(-1)
		Expect(err).To(BeNil())
		Expect(list).To(HaveLen(1))
		Expect(list[0].Name()).To(Equal("test.txt"))
	})
	It("reads merged dirs", func() {
		_f(baseName, "1111.txt")
		_f(overlayName, "2222.json")
		dir, err := mergedDir.Open(".")
		Expect(err).To(BeNil())
		list, err := dir.(fs.ReadDirFile).ReadDir(-1)
		Expect(err).To(BeNil())
		Expect(list).To(HaveLen(2))
		Expect(list[0].Name()).To(Equal("1111.txt"))
		Expect(list[1].Name()).To(Equal("2222.json"))
	})
	It("allows to seek to the beginning of the directory", func() {
		_f(baseName, "1111.txt")
		_f(baseName, "2222.txt")
		_f(baseName, "3333.txt")
		dir, err := mergedDir.Open(".")
		Expect(err).To(BeNil())
		// Paginated reads: two entries, then the remaining one.
		list, _ := dir.(fs.ReadDirFile).ReadDir(2)
		Expect(list).To(HaveLen(2))
		Expect(list[0].Name()).To(Equal("1111.txt"))
		Expect(list[1].Name()).To(Equal("2222.txt"))
		list, _ = dir.(fs.ReadDirFile).ReadDir(2)
		Expect(list).To(HaveLen(1))
		Expect(list[0].Name()).To(Equal("3333.txt"))
	})
})
func _f(dir, name string, content ...string) string {
path := filepath.Join(dir, name)
file, err := os.Create(path)
if err != nil {
panic(err)
}
if len(content) > 0 {
_, _ = file.WriteString(content[0])
}
_ = file.Close()
return path
}

12
utils/number/number.go Normal file
View File

@@ -0,0 +1,12 @@
package number
import (
"strconv"
"golang.org/x/exp/constraints"
)
// ParseInt parses s as a base-10 integer and converts it to T. Invalid
// input yields T's zero value. Values are parsed as int64 first, so
// results may truncate or wrap for narrower or unsigned T.
func ParseInt[T constraints.Integer](s string) T {
	value, err := strconv.ParseInt(s, 10, 64)
	if err != nil {
		return 0
	}
	return T(value)
}

View File

@@ -0,0 +1,31 @@
package number_test
import (
"testing"
"github.com/navidrome/navidrome/utils/number"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestNumber hooks the Ginkgo suite below into `go test`.
func TestNumber(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Number Suite")
}
var _ = Describe("number package", func() {
	Describe("ParseInt", func() {
		// FIX: the first and third descriptions were swapped relative to
		// the type each assertion actually exercises.
		It("should parse a string into an int64", func() {
			Expect(number.ParseInt[int64]("123")).To(Equal(int64(123)))
		})
		It("should parse a string into an int32", func() {
			Expect(number.ParseInt[int32]("123")).To(Equal(int32(123)))
		})
		It("should parse a string into an int", func() {
			Expect(number.ParseInt[int]("123")).To(Equal(123))
		})
		It("should parse a string into an uint", func() {
			Expect(number.ParseInt[uint]("123")).To(Equal(uint(123)))
		})
	})
})

176
utils/pl/pipelines.go Normal file
View File

@@ -0,0 +1,176 @@
// Package pl implements some Data Pipeline helper functions.
// Reference: https://medium.com/amboss/applying-modern-go-concurrency-patterns-to-data-pipelines-b3b5327908d4#3a80
//
// See also:
//
// https://www.oreilly.com/library/view/concurrency-in-go/9781491941294/ch04.html#fano_fani
// https://www.youtube.com/watch?v=f6kdp27TYZs
// https://www.youtube.com/watch?v=QDDwwePbDtw
package pl
import (
"context"
"errors"
"sync"
"github.com/navidrome/navidrome/log"
"golang.org/x/sync/semaphore"
)
// Stage reads values from inputChannel and applies fn to each one using up
// to maxWorkers concurrent goroutines. Successful results go to the first
// returned channel and non-nil errors to the second; both are closed after
// the input is exhausted (or ctx is canceled) and every worker finished.
//
// NOTE(review): both returned channels are unbuffered — the caller must
// drain BOTH, or the workers block forever.
func Stage[In any, Out any](
	ctx context.Context,
	maxWorkers int,
	inputChannel <-chan In,
	fn func(context.Context, In) (Out, error),
) (chan Out, chan error) {
	outputChannel := make(chan Out)
	errorChannel := make(chan error)
	// A weighted semaphore caps the number of in-flight workers.
	limit := int64(maxWorkers)
	sem1 := semaphore.NewWeighted(limit)
	go func() {
		defer close(outputChannel)
		defer close(errorChannel)
		for s := range ReadOrDone(ctx, inputChannel) {
			if err := sem1.Acquire(ctx, 1); err != nil {
				// Acquire only fails when ctx is done; anything else is
				// unexpected and logged.
				if !errors.Is(err, context.Canceled) {
					log.Error(ctx, "Failed to acquire semaphore", err)
				}
				break
			}
			go func(s In) {
				defer sem1.Release(1)
				result, err := fn(ctx, s)
				if err != nil {
					// Cancellation errors are expected during shutdown and
					// are intentionally not reported.
					if !errors.Is(err, context.Canceled) {
						errorChannel <- err
					}
				} else {
					outputChannel <- result
				}
			}(s)
		}
		// By using context.Background() here we are assuming the fn will stop when the context
		// is canceled. This is required so we can wait for the workers to finish and avoid closing
		// the outputChannel before they are done.
		if err := sem1.Acquire(context.Background(), limit); err != nil {
			log.Error(ctx, "Failed waiting for workers", err)
		}
	}()
	return outputChannel, errorChannel
}
// Sink consumes inputChannel with up to maxWorkers concurrent calls to fn,
// returning a channel on which any errors are reported. The per-item
// results are discarded; only errors are surfaced.
func Sink[In any](
	ctx context.Context,
	maxWorkers int,
	inputChannel <-chan In,
	fn func(context.Context, In) error,
) chan error {
	// Adapt fn to a Stage whose output value carries no information.
	discarded, errC := Stage(ctx, maxWorkers, inputChannel, func(ctx context.Context, in In) (bool, error) {
		return false, fn(ctx, in)
	})
	// Drain the dummy results so the stage never blocks on its output.
	go func() {
		for range ReadOrDone(ctx, discarded) {
		}
	}()
	return errC
}
// Merge fans the given input channels into a single output channel. The
// output is closed once every input is exhausted or ctx is canceled.
func Merge[T any](ctx context.Context, cs ...<-chan T) <-chan T {
	merged := make(chan T)
	var wg sync.WaitGroup
	wg.Add(len(cs))
	// One forwarding goroutine per input channel.
	for _, ch := range cs {
		go func(ch <-chan T) {
			defer wg.Done()
			for v := range ReadOrDone(ctx, ch) {
				select {
				case merged <- v:
				case <-ctx.Done():
					return
				}
			}
		}(ch)
	}
	// Close the output only after every forwarder has finished.
	go func() {
		wg.Wait()
		close(merged)
	}()
	return merged
}
func SendOrDone[T any](ctx context.Context, out chan<- T, v T) {
select {
case out <- v:
case <-ctx.Done():
return
}
}
func ReadOrDone[T any](ctx context.Context, in <-chan T) <-chan T {
valStream := make(chan T)
go func() {
defer close(valStream)
for {
select {
case <-ctx.Done():
return
case v, ok := <-in:
if !ok {
return
}
select {
case valStream <- v:
case <-ctx.Done():
}
}
}
}()
return valStream
}
// Tee duplicates every value read from in onto both returned channels.
// Each value must be received from BOTH outputs before the next value is
// read, so the two consumers proceed in lock-step. Both outputs are closed
// when in is exhausted or ctx is canceled.
func Tee[T any](ctx context.Context, in <-chan T) (<-chan T, <-chan T) {
	out1 := make(chan T)
	out2 := make(chan T)
	go func() {
		defer close(out1)
		defer close(out2)
		for val := range ReadOrDone(ctx, in) {
			// Local shadows of the output channels: each is set to nil once
			// its send succeeds, disabling that select case so the second
			// iteration delivers val to the remaining output.
			var out1, out2 = out1, out2
			for i := 0; i < 2; i++ {
				select {
				case <-ctx.Done():
				case out1 <- val:
					out1 = nil
				case out2 <- val:
					out2 = nil
				}
			}
		}
	}()
	return out1, out2
}
func FromSlice[T any](ctx context.Context, in []T) <-chan T {
output := make(chan T, len(in))
for _, c := range in {
output <- c
}
close(output)
return output
}

168
utils/pl/pipelines_test.go Normal file
View File

@@ -0,0 +1,168 @@
package pl_test
import (
"context"
"errors"
"sync/atomic"
"testing"
"time"
"github.com/navidrome/navidrome/utils/pl"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestPipeline hooks the Ginkgo suite below into `go test`.
func TestPipeline(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Pipeline Tests Suite")
}
// Exercises the pl pipeline helpers: Stage fan-out, Merge fan-in, and the
// context-aware channel helpers ReadOrDone/SendOrDone.
var _ = Describe("Pipeline", func() {
	Describe("Stage", func() {
		Context("happy path", func() {
			It("calls the 'transform' function and returns values and errors", func() {
				inC := make(chan int, 4)
				for i := 0; i < 4; i++ {
					inC <- i
				}
				close(inC)
				outC, errC := pl.Stage(context.Background(), 1, inC, func(ctx context.Context, i int) (int, error) {
					if i%2 == 0 {
						return 0, errors.New("even number")
					}
					return i * 2, nil
				})
				// With a single worker, values and errors arrive in input order.
				Expect(<-errC).To(MatchError("even number"))
				Expect(<-outC).To(Equal(2))
				Expect(<-errC).To(MatchError("even number"))
				Expect(<-outC).To(Equal(6))
				Eventually(outC).Should(BeClosed())
				Eventually(errC).Should(BeClosed())
			})
		})
		Context("Multiple workers", func() {
			const maxWorkers = 2
			const numJobs = 100
			It("starts multiple workers, respecting the limit", func() {
				inC := make(chan int, numJobs)
				for i := 0; i < numJobs; i++ {
					inC <- i
				}
				close(inC)
				current := atomic.Int32{}
				count := atomic.Int32{}
				max := atomic.Int32{}
				outC, _ := pl.Stage(context.Background(), maxWorkers, inC, func(ctx context.Context, in int) (int, error) {
					defer current.Add(-1)
					c := current.Add(1)
					count.Add(1)
					// NOTE(review): this load-compare-store is not atomic as a
					// whole, so max could in principle under-count; the 10ms
					// sleep makes that window negligible in practice.
					if c > max.Load() {
						max.Store(c)
					}
					time.Sleep(10 * time.Millisecond) // Slow process
					return 0, nil
				})
				// Discard output and wait for completion
				for range outC {
				}
				Expect(count.Load()).To(Equal(int32(numJobs)))
				Expect(current.Load()).To(Equal(int32(0)))
				Expect(max.Load()).To(Equal(int32(maxWorkers)))
			})
		})
		When("the context is canceled", func() {
			It("closes its output", func() {
				ctx, cancel := context.WithCancel(context.Background())
				inC := make(chan int)
				outC, errC := pl.Stage(ctx, 1, inC, func(ctx context.Context, i int) (int, error) {
					return i, nil
				})
				cancel()
				Eventually(outC).Should(BeClosed())
				Eventually(errC).Should(BeClosed())
			})
		})
	})
	Describe("Merge", func() {
		var in1, in2 chan int
		BeforeEach(func() {
			// Two pre-filled, closed inputs: 0-3 and 4-7.
			in1 = make(chan int, 4)
			in2 = make(chan int, 4)
			for i := 0; i < 4; i++ {
				in1 <- i
				in2 <- i + 4
			}
			close(in1)
			close(in2)
		})
		When("ranging through the output channel", func() {
			It("copies values from all input channels to its output channel", func() {
				var values []int
				for v := range pl.Merge(context.Background(), in1, in2) {
					values = append(values, v)
				}
				Expect(values).To(ConsistOf(0, 1, 2, 3, 4, 5, 6, 7))
			})
		})
		When("there's a blocked channel and the context is closed", func() {
			It("closes its output", func() {
				ctx, cancel := context.WithCancel(context.Background())
				in3 := make(chan int)
				out := pl.Merge(ctx, in1, in2, in3)
				cancel()
				Eventually(out).Should(BeClosed())
			})
		})
	})
	Describe("ReadOrDone", func() {
		When("values are sent", func() {
			It("copies them to its output channel", func() {
				in := make(chan int)
				out := pl.ReadOrDone(context.Background(), in)
				for i := 0; i < 4; i++ {
					in <- i
					j := <-out
					Expect(i).To(Equal(j))
				}
				close(in)
				Eventually(out).Should(BeClosed())
			})
		})
		When("the context is canceled", func() {
			It("closes its output", func() {
				ctx, cancel := context.WithCancel(context.Background())
				in := make(chan int)
				out := pl.ReadOrDone(ctx, in)
				cancel()
				Eventually(out).Should(BeClosed())
			})
		})
	})
	Describe("SendOrDone", func() {
		When("out is unblocked", func() {
			It("puts the value in the channel", func() {
				out := make(chan int)
				value := 1234
				go pl.SendOrDone(context.Background(), out, value)
				Eventually(out).Should(Receive(&value))
			})
		})
		When("out is blocked", func() {
			It("can be canceled by the context", func() {
				ctx, cancel := context.WithCancel(context.Background())
				out := make(chan int)
				go pl.SendOrDone(ctx, out, 1234)
				cancel()
				Consistently(out).ShouldNot(Receive())
			})
		})
	})
})

24
utils/random/number.go Normal file
View File

@@ -0,0 +1,24 @@
package random
import (
"crypto/rand"
"encoding/binary"
"math/big"
"golang.org/x/exp/constraints"
)
// Int64N returns a random int64 in the half-open interval [0, max).
// This is a reimplementation of math/rand/v2.Int64N using a cryptographically secure random number generator.
// NOTE(review): crypto/rand.Int panics when max <= 0, so this does too.
// Its error (only possible on a broken random source) is discarded here.
func Int64N[T constraints.Integer](max T) int64 {
	rnd, _ := rand.Int(rand.Reader, big.NewInt(int64(max)))
	return rnd.Int64()
}
// Uint64 returns a random uint64.
// This is a reimplementation of math/rand/v2.Uint64 using a cryptographically secure random number generator.
func Uint64() uint64 {
	// crypto/rand.Read only fails if the OS entropy source is broken, so
	// the error is deliberately ignored, as in the rest of this package.
	var b [8]byte
	_, _ = rand.Read(b[:])
	return binary.BigEndian.Uint64(b[:])
}

View File

@@ -0,0 +1,24 @@
package random_test
import (
"testing"
"github.com/navidrome/navidrome/utils/random"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestRandom hooks the Ginkgo suite below into `go test`.
func TestRandom(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Random Suite")
}
// FIX: the top-level description said "number package" (copy/paste from
// utils/number); this suite tests the random package. The loop also only
// checked the upper bound — Int64N yields values in [0, 100), so the lower
// bound is asserted too.
var _ = Describe("random package", func() {
	Describe("Int64N", func() {
		It("should return a random int64 in [0, 100)", func() {
			for i := 0; i < 10000; i++ {
				v := random.Int64N(100)
				Expect(v).To(BeNumerically(">=", 0))
				Expect(v).To(BeNumerically("<", 100))
			}
		})
	})
})

View File

@@ -0,0 +1,70 @@
package random
import (
"errors"
"slices"
)
// WeightedChooser allows to randomly choose an entry based on their weights
// (higher weight = higher chance of being chosen). Based on the subtraction method described in
// https://eli.thegreenplace.net/2010/01/22/weighted-random-generation-in-python/
//
// NOTE(review): no internal locking — callers must synchronize concurrent use.
type WeightedChooser[T any] struct {
	entries     []T   // candidate values, parallel to weights
	weights     []int // weight of each entry
	totalWeight int   // cached sum of all weights
}

// NewWeightedChooser creates an empty chooser; populate it with Add.
func NewWeightedChooser[T any]() *WeightedChooser[T] {
	return &WeightedChooser[T]{}
}
// Add appends value with the given weight. A zero weight makes the entry
// effectively unselectable by Pick.
// NOTE(review): negative weights would corrupt totalWeight and the
// selection math — assumed never passed; confirm with callers.
func (w *WeightedChooser[T]) Add(value T, weight int) {
	w.entries = append(w.entries, value)
	w.weights = append(w.weights, weight)
	w.totalWeight += weight
}
// Pick chooses a random entry based on their weights, and removes it from the list
func (w *WeightedChooser[T]) Pick() (T, error) {
	var empty T
	if w.totalWeight == 0 {
		return empty, errors.New("cannot choose from zero weight")
	}
	i, err := w.weightedChoice()
	if err != nil {
		return empty, err
	}
	entry := w.entries[i]
	// Remove cannot fail here: i comes from weightedChoice and is in range.
	_ = w.Remove(i)
	return entry, nil
}
// weightedChoice returns the index of a randomly selected entry, where
// index i is picked with probability weights[i]/totalWeight: draw a random
// value in [0, totalWeight) and walk the weights, subtracting each, until
// the draw goes negative (the subtraction method).
func (w *WeightedChooser[T]) weightedChoice() (int, error) {
	if len(w.entries) == 0 {
		return 0, errors.New("cannot choose from empty list")
	}
	rnd := Int64N(w.totalWeight)
	for i, weight := range w.weights {
		rnd -= int64(weight)
		if rnd < 0 {
			return i, nil
		}
	}
	// Unreachable while totalWeight equals the sum of weights.
	return 0, errors.New("internal error - code should not reach this point")
}
// Remove deletes the entry at index i, keeping entries, weights and
// totalWeight consistent. Returns an error when i is out of bounds.
func (w *WeightedChooser[T]) Remove(i int) error {
	if i < 0 || i >= len(w.entries) {
		return errors.New("index out of bounds")
	}
	w.totalWeight -= w.weights[i]
	w.weights = slices.Delete(w.weights, i, i+1)
	w.entries = slices.Delete(w.entries, i, i+1)
	return nil
}
// Size returns the number of entries still available to be chosen.
func (w *WeightedChooser[T]) Size() int {
	return len(w.entries)
}

View File

@@ -0,0 +1,72 @@
package random
import (
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// In-package suite (package random), so it can reach the unexported
// weightedChoice method directly.
var _ = Describe("WeightedChooser", func() {
	var w *WeightedChooser[int]
	BeforeEach(func() {
		// Entries 0..9 with weights 1..10.
		w = NewWeightedChooser[int]()
		for i := 0; i < 10; i++ {
			w.Add(i, i+1)
		}
	})
	It("selects and removes a random item", func() {
		Expect(w.Size()).To(Equal(10))
		_, err := w.Pick()
		Expect(err).ToNot(HaveOccurred())
		Expect(w.Size()).To(Equal(9))
	})
	It("removes items", func() {
		Expect(w.Size()).To(Equal(10))
		for i := 0; i < 10; i++ {
			Expect(w.Remove(0)).To(Succeed())
		}
		Expect(w.Size()).To(Equal(0))
	})
	It("returns error if trying to remove an invalid index", func() {
		Expect(w.Size()).To(Equal(10))
		Expect(w.Remove(-1)).ToNot(Succeed())
		Expect(w.Remove(10000)).ToNot(Succeed())
		Expect(w.Size()).To(Equal(10))
	})
	It("returns the sole item", func() {
		ws := NewWeightedChooser[string]()
		ws.Add("a", 1)
		Expect(ws.Pick()).To(Equal("a"))
	})
	It("returns all items from the list", func() {
		for i := 0; i < 10; i++ {
			Expect(w.Pick()).To(BeElementOf(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
		}
		Expect(w.Size()).To(Equal(0))
	})
	// NOTE(review): this actually exercises an *exhausted* chooser rather
	// than one that started out empty.
	It("fails when trying to choose from empty set", func() {
		w = NewWeightedChooser[int]()
		w.Add(1, 1)
		w.Add(2, 1)
		Expect(w.Pick()).To(BeElementOf(1, 2))
		Expect(w.Pick()).To(BeElementOf(1, 2))
		_, err := w.Pick()
		Expect(err).To(HaveOccurred())
	})
	It("chooses based on weights", func() {
		// Statistical check: over 200k draws with weights 1..10, the counts
		// must increase with the weight.
		counts := [10]int{}
		for i := 0; i < 200000; i++ {
			c, _ := w.weightedChoice()
			counts[c] = counts[c] + 1
		}
		for i := 0; i < 9; i++ {
			Expect(counts[i]).To(BeNumerically("<", counts[i+1]))
		}
	})
})

172
utils/req/req.go Normal file
View File

@@ -0,0 +1,172 @@
package req
import (
"errors"
"fmt"
"net/http"
"strconv"
"strings"
"time"
"github.com/navidrome/navidrome/log"
)
// Values wraps an *http.Request to provide typed access to its query
// string parameters.
type Values struct {
	*http.Request
}

// Params returns a Values helper for the given request.
func Params(r *http.Request) *Values {
	return &Values{r}
}
var (
	// ErrMissingParam is returned when a required parameter is absent (or empty).
	ErrMissingParam = errors.New("missing parameter")
	// ErrInvalidParam is returned when a parameter cannot be parsed as the requested type.
	ErrInvalidParam = errors.New("invalid parameter")
)

// newError wraps err with the offending parameter name, keeping err
// matchable via errors.Is.
func newError(err error, param string) error {
	return fmt.Errorf("%w: '%s'", err, param)
}
// String returns the named query parameter, or ErrMissingParam when it is
// absent or empty. Use StringPtr to distinguish absent from empty.
func (r *Values) String(param string) (string, error) {
	if v := r.URL.Query().Get(param); v != "" {
		return v, nil
	}
	return "", newError(ErrMissingParam, param)
}
// StringPtr returns a pointer to the named parameter's value, or nil when
// the parameter is not present at all. A present-but-empty parameter
// yields a pointer to "".
func (r *Values) StringPtr(param string) *string {
	// Parse the query string once: url.Values.Query() re-parses RawQuery
	// on every call, and the original code called it twice.
	q := r.URL.Query()
	if _, exists := q[param]; !exists {
		return nil
	}
	s := q.Get(param)
	return &s
}
// BoolPtr returns a pointer to the parsed boolean value of the named
// parameter — "true", "on" and "1" (case-insensitive) count as true, the
// same rule used by Bool — or nil when the parameter is not present.
func (r *Values) BoolPtr(param string) *bool {
	// Parse the query string once instead of once per access.
	q := r.URL.Query()
	if _, exists := q[param]; !exists {
		return nil
	}
	b := strings.Contains("/true/on/1/", "/"+strings.ToLower(q.Get(param))+"/")
	return &b
}
// StringOr returns the named parameter, or def when it is absent or empty.
func (r *Values) StringOr(param, def string) string {
	if v, _ := r.String(param); v != "" {
		return v
	}
	return def
}
// Strings returns every occurrence of the named parameter, or
// ErrMissingParam when there are none.
func (r *Values) Strings(param string) ([]string, error) {
	if values := r.URL.Query()[param]; len(values) > 0 {
		return values, nil
	}
	return nil, newError(ErrMissingParam, param)
}
// TimeOr parses the named parameter as Unix milliseconds, returning def
// when the parameter is absent, is the sentinel "-1", does not parse as an
// integer, or is earlier than 1970-01-02 (a sanity floor that rejects
// bogus near-zero timestamps).
func (r *Values) TimeOr(param string, def time.Time) time.Time {
	v, _ := r.String(param)
	if v == "" || v == "-1" {
		return def
	}
	value, err := strconv.ParseInt(v, 10, 64)
	if err != nil {
		return def
	}
	t := time.UnixMilli(value)
	if t.Before(time.Date(1970, time.January, 2, 0, 0, 0, 0, time.UTC)) {
		return def
	}
	return t
}
// Times parses every occurrence of the named parameter as Unix
// milliseconds. Occurrences that fail to parse are logged and replaced
// with time.Now() instead of failing the whole request; ErrMissingParam is
// returned only when the parameter is entirely absent.
func (r *Values) Times(param string) ([]time.Time, error) {
	pStr, err := r.Strings(param)
	if err != nil {
		return nil, err
	}
	times := make([]time.Time, len(pStr))
	for i, t := range pStr {
		ti, err := strconv.ParseInt(t, 10, 64)
		if err != nil {
			log.Warn(r.Context(), "Ignoring invalid time param", "time", t, err)
			times[i] = time.Now()
			continue
		}
		times[i] = time.UnixMilli(ti)
	}
	return times, nil
}
// Int64 parses the named parameter as a base-10 int64. It returns
// ErrMissingParam when the parameter is absent or empty, and
// ErrInvalidParam when it cannot be parsed.
func (r *Values) Int64(param string) (int64, error) {
	v, err := r.String(param)
	if err != nil {
		return 0, err
	}
	value, parseErr := strconv.ParseInt(v, 10, 64)
	if parseErr != nil {
		return 0, fmt.Errorf("%w '%s': expected integer, got '%s'", ErrInvalidParam, param, v)
	}
	return value, nil
}
// Int is like Int64 but narrows the result to int.
func (r *Values) Int(param string) (int, error) {
	// On error Int64 returns 0, so int(v) is still the documented zero.
	v, err := r.Int64(param)
	return int(v), err
}
// IntOr parses the named parameter as an int, returning def when the
// parameter is absent or invalid.
func (r *Values) IntOr(param string, def int) int {
	if v, err := r.Int64(param); err == nil {
		return int(v)
	}
	return def
}
// Int64Or parses the named parameter as an int64, returning def when the
// parameter is absent or invalid.
func (r *Values) Int64Or(param string, def int64) int64 {
	if v, err := r.Int64(param); err == nil {
		return v
	}
	return def
}
// Ints returns every occurrence of the named parameter parsed as int.
// Unparsable occurrences are silently skipped (unlike Int, which reports
// ErrInvalidParam); ErrMissingParam is returned only when the parameter is
// entirely absent.
func (r *Values) Ints(param string) ([]int, error) {
	pStr, err := r.Strings(param)
	if err != nil {
		return nil, err
	}
	ints := make([]int, 0, len(pStr))
	for _, s := range pStr {
		i, err := strconv.ParseInt(s, 10, 64)
		if err == nil {
			ints = append(ints, int(i))
		}
	}
	return ints, nil
}
// Bool parses the named parameter as a boolean: "true", "on" and "1"
// (case-insensitive) are true, anything else is false. It returns
// ErrMissingParam when the parameter is absent or empty.
func (r *Values) Bool(param string) (bool, error) {
	v, err := r.String(param)
	if err != nil {
		return false, err
	}
	lower := strings.ToLower(v)
	return strings.Contains("/true/on/1/", "/"+lower+"/"), nil
}
// BoolOr parses the named parameter as a boolean, returning def when the
// parameter is absent.
func (r *Values) BoolOr(param string, def bool) bool {
	if v, err := r.Bool(param); err == nil {
		return v
	}
	return def
}

277
utils/req/req_test.go Normal file
View File

@@ -0,0 +1,277 @@
package req_test
import (
"fmt"
"net/http/httptest"
"testing"
"time"
"github.com/navidrome/navidrome/utils/req"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestUtils hooks the Ginkgo suite below into `go test`.
func TestUtils(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Request Helpers Suite")
}
// Exercises the req.Values typed accessors over query-string parameters
// built with httptest.NewRequest.
var _ = Describe("Request Helpers", func() {
	var r *req.Values
	Describe("ParamString", func() {
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?a=123", nil))
		})
		It("returns param as string", func() {
			Expect(r.String("a")).To(Equal("123"))
		})
		It("returns empty string if param does not exist", func() {
			v, err := r.String("NON_EXISTENT_PARAM")
			Expect(err).To(MatchError(req.ErrMissingParam))
			Expect(err.Error()).To(ContainSubstring("NON_EXISTENT_PARAM"))
			Expect(v).To(BeEmpty())
		})
	})
	Describe("ParamStringDefault", func() {
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?a=123", nil))
		})
		It("returns param as string", func() {
			Expect(r.StringOr("a", "default_value")).To(Equal("123"))
		})
		It("returns default string if param does not exist", func() {
			Expect(r.StringOr("xx", "default_value")).To(Equal("default_value"))
		})
	})
	Describe("ParamStrings", func() {
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?a=123&a=456", nil))
		})
		It("returns all param occurrences as []string", func() {
			Expect(r.Strings("a")).To(Equal([]string{"123", "456"}))
		})
		It("returns empty array if param does not exist", func() {
			v, err := r.Strings("xx")
			Expect(err).To(MatchError(req.ErrMissingParam))
			Expect(v).To(BeEmpty())
		})
	})
	Describe("ParamTime", func() {
		// Both sides use time.Local, so the UnixMilli round-trip compares equal.
		d := time.Date(2002, 8, 9, 12, 11, 13, 1000000, time.Local)
		t := d.UnixMilli()
		now := time.Now()
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", fmt.Sprintf("/ping?t=%d&inv=abc", t), nil))
		})
		It("returns parsed time", func() {
			Expect(r.TimeOr("t", now)).To(Equal(d))
		})
		It("returns default time if param does not exist", func() {
			Expect(r.TimeOr("xx", now)).To(Equal(now))
		})
		It("returns default time if param is an invalid timestamp", func() {
			Expect(r.TimeOr("inv", now)).To(Equal(now))
		})
	})
	Describe("ParamTimes", func() {
		d1 := time.Date(2002, 8, 9, 12, 11, 13, 1000000, time.Local)
		d2 := time.Date(2002, 8, 9, 12, 13, 56, 0000000, time.Local)
		t1 := d1.UnixMilli()
		t2 := d2.UnixMilli()
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", fmt.Sprintf("/ping?t=%d&t=%d", t1, t2), nil))
		})
		It("returns all param occurrences as []time.Time", func() {
			Expect(r.Times("t")).To(Equal([]time.Time{d1, d2}))
		})
		// NOTE(review): the description says "empty string", but the
		// assertion is about an empty slice.
		It("returns empty string if param does not exist", func() {
			v, err := r.Times("xx")
			Expect(err).To(MatchError(req.ErrMissingParam))
			Expect(v).To(BeEmpty())
		})
		It("returns current time as default if param is invalid", func() {
			now := time.Now()
			r = req.Params(httptest.NewRequest("GET", "/ping?t=null", nil))
			times, err := r.Times("t")
			Expect(err).ToNot(HaveOccurred())
			Expect(times).To(HaveLen(1))
			Expect(times[0]).To(BeTemporally(">=", now))
		})
	})
	Describe("ParamInt", func() {
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?i=123&inv=123.45", nil))
		})
		Context("int", func() {
			It("returns parsed int", func() {
				Expect(r.IntOr("i", 999)).To(Equal(123))
			})
			It("returns default value if param does not exist", func() {
				Expect(r.IntOr("xx", 999)).To(Equal(999))
			})
			It("returns default value if param is an invalid int", func() {
				Expect(r.IntOr("inv", 999)).To(Equal(999))
			})
			It("returns error if param is an invalid int", func() {
				_, err := r.Int("inv")
				Expect(err).To(MatchError(req.ErrInvalidParam))
			})
		})
		Context("int64", func() {
			It("returns parsed int64", func() {
				Expect(r.Int64Or("i", 999)).To(Equal(int64(123)))
			})
			It("returns default value if param does not exist", func() {
				Expect(r.Int64Or("xx", 999)).To(Equal(int64(999)))
			})
			It("returns default value if param is an invalid int", func() {
				Expect(r.Int64Or("inv", 999)).To(Equal(int64(999)))
			})
			It("returns error if param is an invalid int", func() {
				_, err := r.Int64("inv")
				Expect(err).To(MatchError(req.ErrInvalidParam))
			})
		})
	})
	Describe("ParamInts", func() {
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?i=123&i=456", nil))
		})
		It("returns array of occurrences found", func() {
			Expect(r.Ints("i")).To(Equal([]int{123, 456}))
		})
		It("returns empty array if param does not exist", func() {
			v, err := r.Ints("xx")
			Expect(err).To(MatchError(req.ErrMissingParam))
			Expect(v).To(BeEmpty())
		})
	})
	Describe("ParamBool", func() {
		Context("value is true", func() {
			BeforeEach(func() {
				r = req.Params(httptest.NewRequest("GET", "/ping?b=true&c=on&d=1&e=True", nil))
			})
			It("parses 'true'", func() {
				Expect(r.BoolOr("b", false)).To(BeTrue())
			})
			It("parses 'on'", func() {
				Expect(r.BoolOr("c", false)).To(BeTrue())
			})
			It("parses '1'", func() {
				Expect(r.BoolOr("d", false)).To(BeTrue())
			})
			It("parses 'True'", func() {
				Expect(r.BoolOr("e", false)).To(BeTrue())
			})
		})
		Context("value is false", func() {
			BeforeEach(func() {
				r = req.Params(httptest.NewRequest("GET", "/ping?b=false&c=off&d=0", nil))
			})
			It("parses 'false'", func() {
				Expect(r.BoolOr("b", true)).To(BeFalse())
			})
			It("parses 'off'", func() {
				Expect(r.BoolOr("c", true)).To(BeFalse())
			})
			It("parses '0'", func() {
				Expect(r.BoolOr("d", true)).To(BeFalse())
			})
			It("returns default value if param does not exist", func() {
				Expect(r.BoolOr("xx", true)).To(BeTrue())
			})
		})
	})
	Describe("ParamStringPtr", func() {
		BeforeEach(func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?a=123", nil))
		})
		It("returns pointer to string if param exists", func() {
			ptr := r.StringPtr("a")
			Expect(ptr).ToNot(BeNil())
			Expect(*ptr).To(Equal("123"))
		})
		It("returns nil if param does not exist", func() {
			ptr := r.StringPtr("xx")
			Expect(ptr).To(BeNil())
		})
		It("returns pointer to empty string if param exists but is empty", func() {
			r = req.Params(httptest.NewRequest("GET", "/ping?a=", nil))
			ptr := r.StringPtr("a")
			Expect(ptr).ToNot(BeNil())
			Expect(*ptr).To(Equal(""))
		})
	})
	Describe("ParamBoolPtr", func() {
		Context("value is true", func() {
			BeforeEach(func() {
				r = req.Params(httptest.NewRequest("GET", "/ping?b=true", nil))
			})
			It("returns pointer to true if param is 'true'", func() {
				ptr := r.BoolPtr("b")
				Expect(ptr).ToNot(BeNil())
				Expect(*ptr).To(BeTrue())
			})
		})
		Context("value is false", func() {
			BeforeEach(func() {
				r = req.Params(httptest.NewRequest("GET", "/ping?b=false", nil))
			})
			It("returns pointer to false if param is 'false'", func() {
				ptr := r.BoolPtr("b")
				Expect(ptr).ToNot(BeNil())
				Expect(*ptr).To(BeFalse())
			})
		})
		It("returns nil if param does not exist", func() {
			ptr := r.BoolPtr("xx")
			Expect(ptr).To(BeNil())
		})
	})
})

29
utils/run/run.go Normal file
View File

@@ -0,0 +1,29 @@
package run
import "golang.org/x/sync/errgroup"
// Sequentially runs the given functions sequentially,
// If any function returns an error, it stops the execution and returns that error.
// If all functions return nil, it returns nil.
func Sequentially(fs ...func() error) error {
for _, f := range fs {
if err := f(); err != nil {
return err
}
}
return nil
}
// Parallel runs the given functions in parallel,
// It waits for all functions to finish and returns the first error encountered.
func Parallel(fs ...func() error) func() error {
	return func() error {
		g := errgroup.Group{}
		// NOTE(review): the closure captures the loop variable f — safe
		// with Go >= 1.22 per-iteration loop variables; confirm go.mod.
		for _, f := range fs {
			g.Go(func() error {
				return f()
			})
		}
		// Wait blocks until every function returns and yields the first
		// non-nil error (in completion order).
		return g.Wait()
	}
}

171
utils/run/run_test.go Normal file
View File

@@ -0,0 +1,171 @@
package run_test
import (
"errors"
"sync/atomic"
"testing"
"time"
"github.com/navidrome/navidrome/utils/run"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestRun hooks the Ginkgo suites below into `go test`.
func TestRun(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Run Suite")
}
// Verifies Sequentially's short-circuit semantics.
var _ = Describe("Sequentially", func() {
	It("should return nil if no functions are provided", func() {
		err := run.Sequentially()
		Expect(err).To(BeNil())
	})
	It("should return nil if all functions succeed", func() {
		err := run.Sequentially(
			func() error { return nil },
			func() error { return nil },
		)
		Expect(err).To(BeNil())
	})
	It("should return the error from the first failing function", func() {
		expectedErr := errors.New("error in function 2")
		err := run.Sequentially(
			func() error { return nil },
			func() error { return expectedErr },
			func() error { return errors.New("error in function 3") },
		)
		Expect(err).To(Equal(expectedErr))
	})
	It("should not run functions after the first failing function", func() {
		expectedErr := errors.New("error in function 1")
		var runCount int
		err := run.Sequentially(
			func() error { runCount++; return expectedErr },
			func() error { runCount++; return nil },
		)
		Expect(err).To(Equal(expectedErr))
		Expect(runCount).To(Equal(1))
	})
})
// Verifies Parallel's wait-for-all and error-reporting semantics.
var _ = Describe("Parallel", func() {
	It("should return a function that returns nil if no functions are provided", func() {
		parallelFunc := run.Parallel()
		err := parallelFunc()
		Expect(err).To(BeNil())
	})
	It("should return a function that returns nil if all functions succeed", func() {
		parallelFunc := run.Parallel(
			func() error { return nil },
			func() error { return nil },
			func() error { return nil },
		)
		err := parallelFunc()
		Expect(err).To(BeNil())
	})
	It("should return the first error encountered when functions fail", func() {
		expectedErr := errors.New("parallel error")
		parallelFunc := run.Parallel(
			func() error { return nil },
			func() error { return expectedErr },
			func() error { return errors.New("another error") },
		)
		err := parallelFunc()
		Expect(err).To(HaveOccurred())
		// Note: We can't guarantee which error will be returned first in parallel execution
		// but we can ensure an error is returned
	})
	It("should run all functions in parallel", func() {
		var runCount atomic.Int32
		// All three functions block on this channel until released below.
		sync := make(chan struct{})
		parallelFunc := run.Parallel(
			func() error {
				runCount.Add(1)
				<-sync
				runCount.Add(-1)
				return nil
			},
			func() error {
				runCount.Add(1)
				<-sync
				runCount.Add(-1)
				return nil
			},
			func() error {
				runCount.Add(1)
				<-sync
				runCount.Add(-1)
				return nil
			},
		)
		// Run the parallel function in a goroutine
		go func() {
			Expect(parallelFunc()).To(Succeed())
		}()
		// Wait for all functions to start running
		Eventually(func() int32 { return runCount.Load() }).Should(Equal(int32(3)))
		// Release the functions to complete
		close(sync)
		// Wait for all functions to finish
		Eventually(func() int32 { return runCount.Load() }).Should(Equal(int32(0)))
	})
	It("should wait for all functions to complete before returning", func() {
		var completedCount atomic.Int32
		parallelFunc := run.Parallel(
			func() error {
				completedCount.Add(1)
				return nil
			},
			func() error {
				completedCount.Add(1)
				return nil
			},
			func() error {
				completedCount.Add(1)
				return nil
			},
		)
		Expect(parallelFunc()).To(Succeed())
		Expect(completedCount.Load()).To(Equal(int32(3)))
	})
	It("should return an error even if other functions are still running", func() {
		expectedErr := errors.New("fast error")
		// Written inside a worker and read only after parallelFunc returns,
		// which waits for all workers, so this plain bool is not racy.
		var slowFunctionCompleted bool
		parallelFunc := run.Parallel(
			func() error {
				return expectedErr // Return error immediately
			},
			func() error {
				time.Sleep(50 * time.Millisecond) // Slow function
				slowFunctionCompleted = true
				return nil
			},
		)
		start := time.Now()
		err := parallelFunc()
		duration := time.Since(start)
		Expect(err).To(HaveOccurred())
		// Should wait for all functions to complete, even if one fails early
		Expect(duration).To(BeNumerically(">=", 50*time.Millisecond))
		Expect(slowFunctionCompleted).To(BeTrue())
	})
})

View File

@@ -0,0 +1,69 @@
package singleton
import (
"fmt"
"reflect"
"sync"
"github.com/navidrome/navidrome/log"
)
var (
	instances = map[string]interface{}{}   // created singletons, keyed by type name
	pending   = map[string]chan struct{}{} // in-flight constructions; channel is closed when done
	lock      sync.RWMutex                 // guards both maps above
)
// GetInstance returns the singleton instance of type T, creating it with
// constructor on first use. Exactly one goroutine runs the constructor for
// a given type; concurrent callers for the same type block until it is done.
//
// NOTE(review): if constructor panics, the pending channel is never closed
// and later callers for the same type will block forever — confirm this
// trade-off is acceptable.
func GetInstance[T any](constructor func() T) T {
	var v T
	// The type's string name keys both maps; *T and T get distinct entries.
	name := reflect.TypeOf(v).String()
	// First check with read lock
	lock.RLock()
	if instance, ok := instances[name]; ok {
		defer lock.RUnlock()
		return instance.(T)
	}
	lock.RUnlock()
	// Now check if someone is already creating this type
	lock.Lock()
	// Check again with the write lock - someone might have created it
	if instance, ok := instances[name]; ok {
		lock.Unlock()
		return instance.(T)
	}
	// Check if creation is pending
	wait, isPending := pending[name]
	if !isPending {
		// We'll be the one creating it
		pending[name] = make(chan struct{})
		wait = pending[name]
	}
	lock.Unlock()
	// If someone else is creating it, wait for them
	if isPending {
		<-wait // Wait for creation to complete
		// Now it should be in the instances map
		lock.RLock()
		defer lock.RUnlock()
		return instances[name].(T)
	}
	// We're responsible for creating the instance. The constructor runs
	// without holding the lock, so a slow constructor doesn't block
	// lookups of other, unrelated types.
	newInstance := constructor()
	// Store it and signal other goroutines
	lock.Lock()
	instances[name] = newInstance
	close(wait)           // Signal that creation is complete
	delete(pending, name) // Clean up
	log.Trace("Created new singleton", "type", name, "instance", fmt.Sprintf("%+v", newInstance))
	lock.Unlock()
	return newInstance
}

View File

@@ -0,0 +1,102 @@
package singleton_test
import (
"sync"
"sync/atomic"
"testing"
"github.com/navidrome/navidrome/model/id"
"github.com/navidrome/navidrome/utils/singleton"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestSingleton wires Ginkgo into the standard `go test` runner for this package.
func TestSingleton(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Singleton Suite")
}
var _ = Describe("GetInstance", func() {
	// T carries an id so the specs can tell whether two calls returned the
	// same underlying instance.
	type T struct{ id string }
	// numInstancesCreated is shared across the specs below; the second spec
	// relies on the first one having already created the singleton.
	var numInstancesCreated int
	constructor := func() *T {
		numInstancesCreated++
		return &T{id: id.NewRandom()}
	}
	It("calls the constructor to create a new instance", func() {
		instance := singleton.GetInstance(constructor)
		Expect(numInstancesCreated).To(Equal(1))
		Expect(instance).To(BeAssignableToTypeOf(&T{}))
	})
	It("does not call the constructor the next time", func() {
		instance := singleton.GetInstance(constructor)
		newInstance := singleton.GetInstance(constructor)
		Expect(newInstance.id).To(Equal(instance.id))
		Expect(numInstancesCreated).To(Equal(1))
	})
	It("makes a distinction between a type and its pointer", func() {
		instance := singleton.GetInstance(constructor)
		newInstance := singleton.GetInstance(func() T {
			numInstancesCreated++
			return T{id: id.NewRandom()}
		})
		Expect(instance).To(BeAssignableToTypeOf(&T{}))
		Expect(newInstance).To(BeAssignableToTypeOf(T{}))
		Expect(newInstance.id).ToNot(Equal(instance.id))
		Expect(numInstancesCreated).To(Equal(2))
	})
	It("only calls the constructor once when called concurrently", func() {
		// This test creates 80000 goroutines that call GetInstance concurrently. If the constructor is called more than once, the test will fail.
		const numCallsToDo = 80000
		var numCallsDone atomic.Uint32
		// This WaitGroup is used to make sure all goroutines are ready before the test starts
		prepare := sync.WaitGroup{}
		prepare.Add(numCallsToDo)
		// This WaitGroup is used to synchronize the start of all goroutines as simultaneous as possible
		start := sync.WaitGroup{}
		start.Add(1)
		// This WaitGroup is used to wait for all goroutines to be done
		done := sync.WaitGroup{}
		done.Add(numCallsToDo)
		numInstancesCreated = 0
		for i := 0; i < numCallsToDo; i++ {
			go func() {
				// This is needed to make sure the test does not hang if it fails
				defer GinkgoRecover()
				// Wait for all goroutines to be ready
				start.Wait()
				instance := singleton.GetInstance(func() struct{ I int } {
					numInstancesCreated++
					return struct{ I int }{I: numInstancesCreated}
				})
				// Increment the number of calls done
				numCallsDone.Add(1)
				// Flag the main WaitGroup that this goroutine is done
				done.Done()
				// Make sure the instance we get is always the same one
				Expect(instance.I).To(Equal(1))
			}()
			// Flag that this goroutine is ready to start
			prepare.Done()
		}
		prepare.Wait() // Wait for all goroutines to be ready
		start.Done()   // Start all goroutines
		done.Wait()    // Wait for all goroutines to be done
		Expect(numCallsDone.Load()).To(Equal(uint32(numCallsToDo)))
		Expect(numInstancesCreated).To(Equal(1))
	})
})

184
utils/slice/slice.go Normal file
View File

@@ -0,0 +1,184 @@
package slice
import (
"bufio"
"bytes"
"cmp"
"io"
"iter"
"slices"
"golang.org/x/exp/maps"
)
// Map applies mapFunc to every element of t and returns the results as a new
// slice of the same length. The input slice is never modified.
func Map[T any, R any](t []T, mapFunc func(T) R) []R {
	out := make([]R, 0, len(t))
	for _, item := range t {
		out = append(out, mapFunc(item))
	}
	return out
}
// MapWithArg maps each element of t to an output value by calling
// mapFunc(arg, element), passing the same fixed arg for every element.
func MapWithArg[I any, O any, A any](t []I, arg A, mapFunc func(A, I) O) []O {
	out := make([]O, len(t))
	for idx := range t {
		out[idx] = mapFunc(arg, t[idx])
	}
	return out
}
// Group partitions s into buckets keyed by keyFunc. Elements within each
// bucket keep their original relative order.
func Group[T any, K comparable](s []T, keyFunc func(T) K) map[K][]T {
	groups := make(map[K][]T)
	for _, element := range s {
		key := keyFunc(element)
		groups[key] = append(groups[key], element)
	}
	return groups
}
// ToMap builds a map from s, deriving each key/value pair via transformFunc.
// If two elements produce the same key, the later element's value wins.
func ToMap[T any, K comparable, V any](s []T, transformFunc func(T) (K, V)) map[K]V {
	result := make(map[K]V, len(s))
	for i := range s {
		key, value := transformFunc(s[i])
		result[key] = value
	}
	return result
}
// CompactByFrequency deduplicates list and returns the distinct values
// ordered from most to least frequent. The relative order of values with
// equal frequency is unspecified (it follows map iteration order).
func CompactByFrequency[T comparable](list []T) []T {
	freq := make(map[T]int)
	for _, value := range list {
		freq[value]++
	}
	result := maps.Keys(freq)
	slices.SortFunc(result, func(a, b T) int {
		// Compare b against a to sort by descending count.
		return cmp.Compare(freq[b], freq[a])
	})
	return result
}
// MostFrequent returns the value that occurs most often in list, ignoring
// zero values. When counts tie, the value that reached that count first wins.
// It returns the zero value for an empty (or all-zero) input.
func MostFrequent[T comparable](list []T) T {
	var winner T
	if len(list) == 0 {
		return winner
	}
	var zero T
	best := 0
	seen := make(map[T]int)
	for _, candidate := range list {
		if candidate == zero {
			continue // zero values never win
		}
		seen[candidate]++
		if seen[candidate] > best {
			best = seen[candidate]
			winner = candidate
		}
	}
	return winner
}
// Insert returns slice with value inserted at index, shifting later elements
// to the right. Like append, it may reuse and modify slice's backing array,
// so the result should be used in place of the input. index may be len(slice)
// to append at the end.
func Insert[T any](slice []T, value T, index int) []T {
	// slices.Insert does this in a single pass; the previous hand-rolled
	// version allocated a temporary slice for the tail on every call.
	return slices.Insert(slice, index, value)
}
// Remove returns slice with the element at index removed, shifting later
// elements left. It modifies slice's backing array in place, so the result
// should be used in place of the input.
func Remove[T any](slice []T, index int) []T {
	// slices.Delete is equivalent to append(slice[:index], slice[index+1:]...)
	// and additionally zeroes the vacated tail element so it can be GC'd.
	return slices.Delete(slice, index, index+1)
}
// Move returns slice with the element at srcIndex relocated to dstIndex.
// It may reuse and modify slice's backing array, so the result should be
// used in place of the input.
func Move[T any](slice []T, srcIndex int, dstIndex int) []T {
	item := slice[srcIndex]
	// Drop the element from its original position...
	remaining := append(slice[:srcIndex], slice[srcIndex+1:]...)
	// ...then splice it back in at the destination.
	return append(remaining[:dstIndex], append([]T{item}, remaining[dstIndex:]...)...)
}
// Unique returns the distinct elements of list, preserving first-occurrence
// order. It returns nil for an empty input.
func Unique[T comparable](list []T) []T {
	var out []T
	visited := map[T]struct{}{}
	for _, value := range list {
		if _, dup := visited[value]; dup {
			continue
		}
		visited[value] = struct{}{}
		out = append(out, value)
	}
	return out
}
// LinesFrom returns a Seq that reads lines from the given reader
func LinesFrom(reader io.Reader) iter.Seq[string] {
return func(yield func(string) bool) {
scanner := bufio.NewScanner(reader)
scanner.Split(scanLines)
for scanner.Scan() {
if !yield(scanner.Text()) {
return
}
}
}
}
// From https://stackoverflow.com/a/41433698
func scanLines(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexAny(data, "\r\n"); i >= 0 {
if data[i] == '\n' {
// We have a line terminated by single newline.
return i + 1, data[0:i], nil
}
advance = i + 1
if len(data) > i+1 && data[i+1] == '\n' {
advance += 1
}
return advance, data[0:i], nil
}
// If we're at EOF, we have a final, non-terminated line. Return it.
if atEOF {
return len(data), data, nil
}
// Request more data.
return 0, nil, nil
}
// CollectChunks collects chunks of n elements from the input sequence and return a Seq of chunks
func CollectChunks[T any](it iter.Seq[T], n int) iter.Seq[[]T] {
return func(yield func([]T) bool) {
s := make([]T, 0, n)
for x := range it {
s = append(s, x)
if len(s) >= n {
if !yield(s) {
return
}
s = make([]T, 0, n)
}
}
if len(s) > 0 {
yield(s)
}
}
}
// SeqFunc returns a Seq that iterates over the slice with the given mapping function
func SeqFunc[I, O any](s []I, f func(I) O) iter.Seq[O] {
return func(yield func(O) bool) {
for _, x := range s {
if !yield(f(x)) {
return
}
}
}
}
// Filter returns a new slice with the elements of s for which filterFunc
// reports true, preserving their original order. It returns nil when nothing
// matches.
func Filter[T any](s []T, filterFunc func(T) bool) []T {
	var kept []T
	for _, element := range s {
		if !filterFunc(element) {
			continue
		}
		kept = append(kept, element)
	}
	return kept
}

213
utils/slice/slice_test.go Normal file
View File

@@ -0,0 +1,213 @@
package slice_test
import (
"os"
"slices"
"strconv"
"testing"
"github.com/navidrome/navidrome/tests"
"github.com/navidrome/navidrome/utils/slice"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestSlice wires Ginkgo into the standard `go test` runner for this package.
func TestSlice(t *testing.T) {
	// tests.Init sets up the shared test environment (see the tests package).
	tests.Init(t, false)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Slice Suite")
}
// Specs for the helpers in utils/slice.
var _ = Describe("Slice Utils", func() {
	Describe("Map", func() {
		It("returns empty slice for an empty input", func() {
			mapFunc := func(v int) string { return strconv.Itoa(v * 2) }
			result := slice.Map([]int{}, mapFunc)
			Expect(result).To(BeEmpty())
		})
		It("returns a new slice with elements mapped", func() {
			mapFunc := func(v int) string { return strconv.Itoa(v * 2) }
			result := slice.Map([]int{1, 2, 3, 4}, mapFunc)
			Expect(result).To(ConsistOf("2", "4", "6", "8"))
		})
	})
	Describe("MapWithArg", func() {
		It("returns empty slice for an empty input", func() {
			mapFunc := func(a int, v int) string { return strconv.Itoa(a + v) }
			result := slice.MapWithArg([]int{}, 10, mapFunc)
			Expect(result).To(BeEmpty())
		})
		It("returns a new slice with elements mapped", func() {
			mapFunc := func(a int, v int) string { return strconv.Itoa(a + v) }
			result := slice.MapWithArg([]int{1, 2, 3, 4}, 10, mapFunc)
			Expect(result).To(ConsistOf("11", "12", "13", "14"))
		})
	})
	Describe("Group", func() {
		It("returns empty map for an empty input", func() {
			keyFunc := func(v int) int { return v % 2 }
			result := slice.Group([]int{}, keyFunc)
			Expect(result).To(BeEmpty())
		})
		It("groups by the result of the key function", func() {
			keyFunc := func(v int) int { return v % 2 }
			result := slice.Group([]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, keyFunc)
			Expect(result).To(HaveLen(2))
			Expect(result[0]).To(ConsistOf(2, 4, 6, 8, 10))
			Expect(result[1]).To(ConsistOf(1, 3, 5, 7, 9, 11))
		})
	})
	Describe("ToMap", func() {
		It("returns empty map for an empty input", func() {
			transformFunc := func(v int) (int, string) { return v, strconv.Itoa(v) }
			result := slice.ToMap([]int{}, transformFunc)
			Expect(result).To(BeEmpty())
		})
		It("returns a map with the result of the transform function", func() {
			transformFunc := func(v int) (int, string) { return v * 2, strconv.Itoa(v * 2) }
			result := slice.ToMap([]int{1, 2, 3, 4}, transformFunc)
			Expect(result).To(HaveLen(4))
			Expect(result).To(HaveKeyWithValue(2, "2"))
			Expect(result).To(HaveKeyWithValue(4, "4"))
			Expect(result).To(HaveKeyWithValue(6, "6"))
			Expect(result).To(HaveKeyWithValue(8, "8"))
		})
	})
	Describe("CompactByFrequency", func() {
		It("returns empty slice for an empty input", func() {
			Expect(slice.CompactByFrequency([]int{})).To(BeEmpty())
		})
		It("groups by frequency", func() {
			Expect(slice.CompactByFrequency([]int{1, 2, 1, 2, 3, 2})).To(HaveExactElements(2, 1, 3))
		})
	})
	Describe("MostFrequent", func() {
		It("returns zero value if no arguments are passed", func() {
			Expect(slice.MostFrequent([]int{})).To(BeZero())
		})
		It("returns the single item", func() {
			Expect(slice.MostFrequent([]string{"123"})).To(Equal("123"))
		})
		It("returns the item that appeared more times", func() {
			Expect(slice.MostFrequent([]string{"1", "2", "1", "2", "3", "2"})).To(Equal("2"))
		})
		It("ignores zero values", func() {
			Expect(slice.MostFrequent([]int{0, 0, 0, 2, 2})).To(Equal(2))
		})
	})
	Describe("Move", func() {
		It("moves item to end of slice", func() {
			Expect(slice.Move([]string{"1", "2", "3"}, 0, 2)).To(HaveExactElements("2", "3", "1"))
		})
		It("moves item to beginning of slice", func() {
			Expect(slice.Move([]string{"1", "2", "3"}, 2, 0)).To(HaveExactElements("3", "1", "2"))
		})
		It("keeps item in same position if srcIndex == dstIndex", func() {
			Expect(slice.Move([]string{"1", "2", "3"}, 1, 1)).To(HaveExactElements("1", "2", "3"))
		})
	})
	Describe("Unique", func() {
		It("returns empty slice for an empty input", func() {
			Expect(slice.Unique([]int{})).To(BeEmpty())
		})
		It("returns the unique elements", func() {
			Expect(slice.Unique([]int{1, 2, 1, 2, 3, 2})).To(HaveExactElements(1, 2, 3))
		})
	})
	DescribeTable("LinesFrom",
		func(path string, expected int) {
			count := 0
			file, _ := os.Open(path)
			defer file.Close()
			// `for range` (the values are not needed); `for _ = range` is the
			// redundant form flagged by `go vet`/`gofmt -s`.
			for range slice.LinesFrom(file) {
				count++
			}
			Expect(count).To(Equal(expected))
		},
		Entry("returns empty slice for an empty input", "tests/fixtures/empty.txt", 0),
		Entry("returns the lines of a file", "tests/fixtures/playlists/pls1.m3u", 2),
		Entry("returns empty if file does not exist", "tests/fixtures/NON-EXISTENT", 0),
	)
	DescribeTable("CollectChunks",
		func(input []int, n int, expected [][]int) {
			var result [][]int
			for chunks := range slice.CollectChunks(slices.Values(input), n) {
				result = append(result, chunks)
			}
			Expect(result).To(Equal(expected))
		},
		Entry("returns empty slice (nil) for an empty input", []int{}, 1, nil),
		Entry("returns the slice in one chunk if len < chunkSize", []int{1, 2, 3}, 10, [][]int{{1, 2, 3}}),
		Entry("breaks up the slice if len > chunkSize", []int{1, 2, 3, 4, 5}, 3, [][]int{{1, 2, 3}, {4, 5}}),
	)
	Describe("SeqFunc", func() {
		It("returns empty slice for an empty input", func() {
			it := slice.SeqFunc([]int{}, func(v int) int { return v })
			result := slices.Collect(it)
			Expect(result).To(BeEmpty())
		})
		It("returns a new slice with mapped elements", func() {
			it := slice.SeqFunc([]int{1, 2, 3, 4}, func(v int) string { return strconv.Itoa(v * 2) })
			result := slices.Collect(it)
			Expect(result).To(ConsistOf("2", "4", "6", "8"))
		})
	})
	Describe("Filter", func() {
		It("returns empty slice for an empty input", func() {
			filterFunc := func(v int) bool { return v > 0 }
			result := slice.Filter([]int{}, filterFunc)
			Expect(result).To(BeEmpty())
		})
		It("returns all elements when filter matches all", func() {
			filterFunc := func(v int) bool { return v > 0 }
			result := slice.Filter([]int{1, 2, 3, 4}, filterFunc)
			Expect(result).To(HaveExactElements(1, 2, 3, 4))
		})
		It("returns empty slice when filter matches none", func() {
			filterFunc := func(v int) bool { return v > 10 }
			result := slice.Filter([]int{1, 2, 3, 4}, filterFunc)
			Expect(result).To(BeEmpty())
		})
		It("returns only matching elements", func() {
			filterFunc := func(v int) bool { return v%2 == 0 }
			result := slice.Filter([]int{1, 2, 3, 4, 5, 6}, filterFunc)
			Expect(result).To(HaveExactElements(2, 4, 6))
		})
		It("works with string slices", func() {
			filterFunc := func(s string) bool { return len(s) > 3 }
			result := slice.Filter([]string{"a", "abc", "abcd", "ab", "abcde"}, filterFunc)
			Expect(result).To(HaveExactElements("abcd", "abcde"))
		})
		It("preserves order of elements", func() {
			filterFunc := func(v int) bool { return v%2 == 1 }
			result := slice.Filter([]int{9, 8, 7, 6, 5, 4, 3, 2, 1}, filterFunc)
			Expect(result).To(HaveExactElements(9, 7, 5, 3, 1))
		})
	})
})

View File

@@ -0,0 +1,65 @@
package str
import (
"html"
"regexp"
"slices"
"strings"
"github.com/deluan/sanitize"
"github.com/microcosm-cc/bluemonday"
"github.com/navidrome/navidrome/conf"
)
// ignoredCharsRegex matches quote, bracket and comma characters that are
// stripped from search text by SanitizeStrings.
var ignoredCharsRegex = regexp.MustCompile("[“”‘’'\"\\[({\\])},]")

// slashRemover turns path separators into spaces so path-like input is
// tokenized into separate words.
var slashRemover = strings.NewReplacer("\\", " ", "/", " ")
// SanitizeStrings concatenates the given strings and normalizes the result
// for searching: lowercase, accents folded to ASCII, quote/bracket/comma
// characters and slashes removed, and duplicate words collapsed. The
// resulting words are returned sorted and space-separated.
func SanitizeStrings(text ...string) string {
	// Concatenate all inputs, trimming surrounding whitespace from each.
	var joined strings.Builder
	for _, part := range text {
		joined.WriteString(strings.TrimSpace(part))
		joined.WriteByte(' ')
	}
	// Normalize: unify unicode punctuation, break up paths, lowercase,
	// fold accents, then drop the ignored characters entirely.
	normalized := slashRemover.Replace(Clear(joined.String()))
	normalized = sanitize.Accents(strings.ToLower(normalized))
	normalized = ignoredCharsRegex.ReplaceAllString(normalized, "")
	// Split into words, sort and deduplicate (Compact requires sorted input).
	words := strings.Fields(normalized)
	slices.Sort(words)
	words = slices.Compact(words)
	// Return the sanitized words as a single space-separated string.
	return strings.Join(words, " ")
}
// policy is the shared bluemonday sanitization policy used by SanitizeText.
var policy = bluemonday.UGCPolicy()

// SanitizeText runs text through the bluemonday UGC policy and then decodes
// any remaining HTML entities in the sanitized result.
func SanitizeText(text string) string {
	s := policy.Sanitize(text)
	return html.UnescapeString(s)
}
// SanitizeFieldForSorting normalizes originalValue for use as a sort key:
// trims whitespace, folds accents to ASCII, lowercases, and unifies unicode
// punctuation via Clear.
func SanitizeFieldForSorting(originalValue string) string {
	v := strings.TrimSpace(sanitize.Accents(originalValue))
	return Clear(strings.ToLower(v))
}
// SanitizeFieldForSortingNoArticle works like SanitizeFieldForSorting, but
// also strips a leading article (per conf.Server.IgnoredArticles) before
// normalizing.
func SanitizeFieldForSortingNoArticle(originalValue string) string {
	v := strings.TrimSpace(sanitize.Accents(originalValue))
	return Clear(strings.ToLower(strings.TrimSpace(RemoveArticle(v))))
}
// RemoveArticle strips a leading article (as configured in
// conf.Server.IgnoredArticles, a space-separated list) from name. Only the
// first matching article is removed; if none match, name is returned
// unchanged.
func RemoveArticle(name string) string {
	// strings.Fields (unlike strings.Split) yields no articles for an empty
	// configuration and tolerates repeated spaces between entries. With
	// strings.Split, an empty IgnoredArticles produced [""], which made a
	// single leading space act as an article prefix.
	for _, article := range strings.Fields(conf.Server.IgnoredArticles) {
		if rest, ok := strings.CutPrefix(name, article+" "); ok {
			return rest
		}
	}
	return name
}

View File

@@ -0,0 +1,109 @@
package str_test
import (
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/utils/str"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("Sanitize Strings", func() {
Describe("SanitizeStrings", func() {
It("returns all lowercase chars", func() {
Expect(str.SanitizeStrings("Some Text")).To(Equal("some text"))
})
It("removes accents", func() {
Expect(str.SanitizeStrings("Quintão")).To(Equal("quintao"))
})
It("remove extra spaces", func() {
Expect(str.SanitizeStrings(" some text ", "text some")).To(Equal("some text"))
})
It("remove duplicated words", func() {
Expect(str.SanitizeStrings("legião urbana", "urbana legiÃo")).To(Equal("legiao urbana"))
})
It("remove symbols", func() {
Expect(str.SanitizeStrings("Toms Diner ' “40” A")).To(Equal("40 a diner toms"))
})
It("remove opening brackets", func() {
Expect(str.SanitizeStrings("[Five Years]")).To(Equal("five years"))
})
It("remove slashes", func() {
Expect(str.SanitizeStrings("folder/file\\yyyy")).To(Equal("file folder yyyy"))
})
It("normalizes utf chars", func() {
// These uses different types of hyphens
Expect(str.SanitizeStrings("k—os", "kos")).To(Equal("k-os"))
})
It("remove commas", func() {
// This is specially useful for handling cases where the Sort field uses comma.
// It reduces the size of the resulting string, thus reducing the size of the DB table and indexes.
Expect(str.SanitizeStrings("Bob Marley", "Marley, Bob")).To(Equal("bob marley"))
})
})
Describe("SanitizeFieldForSorting", func() {
BeforeEach(func() {
conf.Server.IgnoredArticles = "The O"
})
It("sanitize accents", func() {
Expect(str.SanitizeFieldForSorting("Céu")).To(Equal("ceu"))
})
It("removes articles", func() {
Expect(str.SanitizeFieldForSorting("The Beatles")).To(Equal("the beatles"))
})
It("removes accented articles", func() {
Expect(str.SanitizeFieldForSorting("Õ Blésq Blom")).To(Equal("o blesq blom"))
})
})
Describe("SanitizeFieldForSortingNoArticle", func() {
BeforeEach(func() {
conf.Server.IgnoredArticles = "The O"
})
It("sanitize accents", func() {
Expect(str.SanitizeFieldForSortingNoArticle("Céu")).To(Equal("ceu"))
})
It("removes articles", func() {
Expect(str.SanitizeFieldForSortingNoArticle("The Beatles")).To(Equal("beatles"))
})
It("removes accented articles", func() {
Expect(str.SanitizeFieldForSortingNoArticle("Õ Blésq Blom")).To(Equal("blesq blom"))
})
})
Describe("RemoveArticle", func() {
Context("Empty articles list", func() {
BeforeEach(func() {
conf.Server.IgnoredArticles = ""
})
It("returns empty if string is empty", func() {
Expect(str.RemoveArticle("")).To(BeEmpty())
})
It("returns same string", func() {
Expect(str.RemoveArticle("The Beatles")).To(Equal("The Beatles"))
})
})
Context("Default articles", func() {
BeforeEach(func() {
conf.Server.IgnoredArticles = "The El La Los Las Le Les Os As O A"
})
It("returns empty if string is empty", func() {
Expect(str.RemoveArticle("")).To(BeEmpty())
})
It("remove prefix article from string", func() {
Expect(str.RemoveArticle("Os Paralamas do Sucesso")).To(Equal("Paralamas do Sucesso"))
})
It("does not remove article if it is part of the first word", func() {
Expect(str.RemoveArticle("Thelonious Monk")).To(Equal("Thelonious Monk"))
})
})
})
})

64
utils/str/str.go Normal file
View File

@@ -0,0 +1,64 @@
package str
import (
"strings"
"unicode/utf8"
)
var utf8ToAscii = func() *strings.Replacer {
var utf8Map = map[string]string{
"'": ``,
`"`: `"〃ˮײ᳓″‶˶ʺ“”˝‟`,
"-": `‐–—−―`,
}
list := make([]string, 0, len(utf8Map)*2)
for ascii, utf8 := range utf8Map {
for _, r := range utf8 {
list = append(list, string(r), ascii)
}
}
return strings.NewReplacer(list...)
}()
// Clear replaces known UTF-8 punctuation look-alikes (typographic quotes and
// long dashes) in name with their plain ASCII equivalents, using the
// utf8ToAscii replacer defined above.
func Clear(name string) string {
	return utf8ToAscii.Replace(name)
}
// LongestCommonPrefix returns the longest string (byte-wise) that every
// element of list starts with. It returns "" for an empty list.
func LongestCommonPrefix(list []string) string {
	if len(list) == 0 {
		return ""
	}
	// Start with the first string and shrink it against every other element.
	prefix := list[0]
	for _, candidate := range list[1:] {
		matched := 0
		for matched < len(prefix) && matched < len(candidate) && prefix[matched] == candidate[matched] {
			matched++
		}
		prefix = prefix[:matched]
		if prefix == "" {
			break // no common prefix is possible anymore
		}
	}
	return prefix
}
// TruncateRunes truncates a string to a maximum number of runes, adding a
// suffix if truncated. The suffix is included in the rune count, so if
// maxRunes is 30 and suffix is "...", at most 27 runes of content are kept.
// If the suffix alone has maxRunes runes or more, only the suffix is
// returned.
func TruncateRunes(s string, maxRunes int, suffix string) string {
	if utf8.RuneCountInString(s) <= maxRunes {
		return s
	}
	// Rune budget left for content after reserving room for the suffix,
	// clamped so a suffix longer than maxRunes keeps zero content. The old
	// `truncateAt >= len(runes)` branch was unreachable: it would require
	// the suffix rune count to be negative.
	keep := max(maxRunes-utf8.RuneCountInString(suffix), 0)
	return string([]rune(s)[:keep]) + suffix
}

View File

@@ -0,0 +1,13 @@
package str_test
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestStrClear wires Ginkgo into the standard `go test` runner for this package.
func TestStrClear(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Str Suite")
}

214
utils/str/str_test.go Normal file
View File

@@ -0,0 +1,214 @@
package str_test
import (
"github.com/navidrome/navidrome/utils/str"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("String Utils", func() {
Describe("Clear", func() {
DescribeTable("replaces some Unicode chars with their equivalent ASCII",
func(input, expected string) {
Expect(str.Clear(input)).To(Equal(expected))
},
Entry("k-os", "kos", "k-os"),
Entry("kos", "kos", "k-os"),
Entry(`"Weird" Al Yankovic`, "“Weird” Al Yankovic", `"Weird" Al Yankovic`),
Entry("Single quotes", "Single quotes", "'Single' quotes"),
)
})
Describe("LongestCommonPrefix", func() {
It("finds the longest common prefix", func() {
Expect(str.LongestCommonPrefix(testPaths)).To(Equal("/Music/iTunes 1/iTunes Media/Music/"))
})
It("does NOT handle partial prefixes", func() {
albums := []string{
"/artist/albumOne",
"/artist/albumTwo",
}
Expect(str.LongestCommonPrefix(albums)).To(Equal("/artist/album"))
})
})
Describe("TruncateRunes", func() {
It("returns string unchanged if under max runes", func() {
Expect(str.TruncateRunes("hello", 10, "...")).To(Equal("hello"))
})
It("returns string unchanged if exactly at max runes", func() {
Expect(str.TruncateRunes("hello", 5, "...")).To(Equal("hello"))
})
It("truncates and adds suffix when over max runes", func() {
Expect(str.TruncateRunes("hello world", 8, "...")).To(Equal("hello..."))
})
It("handles unicode characters correctly", func() {
// 6 emoji characters, maxRunes=5, suffix="..." (3 runes)
// So content gets 5-3=2 runes
Expect(str.TruncateRunes("😀😁😂😃😄😅", 5, "...")).To(Equal("😀😁..."))
})
It("handles multi-byte UTF-8 characters", func() {
// Characters like é are single runes
Expect(str.TruncateRunes("Café au Lait", 5, "...")).To(Equal("Ca..."))
})
It("works with empty suffix", func() {
Expect(str.TruncateRunes("hello world", 5, "")).To(Equal("hello"))
})
It("accounts for suffix length in truncation", func() {
// maxRunes=10, suffix="..." (3 runes) -> leaves 7 runes for content
result := str.TruncateRunes("hello world this is long", 10, "...")
Expect(result).To(Equal("hello w..."))
// Verify total rune count is <= maxRunes
runeCount := len([]rune(result))
Expect(runeCount).To(BeNumerically("<=", 10))
})
It("handles very long suffix gracefully", func() {
// If suffix is longer than maxRunes, we still add it
// but the content will be truncated to 0
result := str.TruncateRunes("hello world", 5, "... (truncated)")
// Result will be just the suffix (since truncateAt=0)
Expect(result).To(Equal("... (truncated)"))
})
It("handles empty string", func() {
Expect(str.TruncateRunes("", 10, "...")).To(Equal(""))
})
It("uses custom suffix", func() {
// maxRunes=11, suffix=" [...]" (6 runes) -> content gets 5 runes
// "hello world" is 11 runes exactly, so we need a longer string
Expect(str.TruncateRunes("hello world extra", 11, " [...]")).To(Equal("hello [...]"))
})
DescribeTable("truncates at rune boundaries (not byte boundaries)",
func(input string, maxRunes int, suffix string, expected string) {
Expect(str.TruncateRunes(input, maxRunes, suffix)).To(Equal(expected))
},
Entry("ASCII", "abcdefghij", 5, "...", "ab..."),
Entry("Mixed ASCII and Unicode", "ab😀cd", 4, ".", "ab😀."),
Entry("All emoji", "😀😁😂😃😄", 3, "…", "😀😁…"),
Entry("Japanese", "こんにちは世界", 3, "…", "こん…"),
)
})
})
var testPaths = []string{
"/Music/iTunes 1/iTunes Media/Music/ABBA/Gold_ Greatest Hits/Dancing Queen.m4a",
"/Music/iTunes 1/iTunes Media/Music/ABBA/Gold_ Greatest Hits/Mamma Mia.m4a",
"/Music/iTunes 1/iTunes Media/Music/Art Blakey/A Night At Birdland, Vol. 1/01 Annoucement By Pee Wee Marquette.m4a",
"/Music/iTunes 1/iTunes Media/Music/Art Blakey/A Night At Birdland, Vol. 1/02 Split Kick.m4a",
"/Music/iTunes 1/iTunes Media/Music/As Frenéticas/As Frenéticas/Perigosa.m4a",
"/Music/iTunes 1/iTunes Media/Music/Bachman-Turner Overdrive/Gold/Down Down.m4a",
"/Music/iTunes 1/iTunes Media/Music/Bachman-Turner Overdrive/Gold/Hey You.m4a",
"/Music/iTunes 1/iTunes Media/Music/Bachman-Turner Overdrive/Gold/Hold Back The Water.m4a",
"/Music/iTunes 1/iTunes Media/Music/Belle And Sebastian/Write About Love/01 I Didn't See It Coming.m4a",
"/Music/iTunes 1/iTunes Media/Music/Belle And Sebastian/Write About Love/02 Come On Sister.m4a",
"/Music/iTunes 1/iTunes Media/Music/Black Eyed Peas/Elephunk/03 Let's Get Retarded.m4a",
"/Music/iTunes 1/iTunes Media/Music/Black Eyed Peas/Elephunk/04 Hey Mama.m4a",
"/Music/iTunes 1/iTunes Media/Music/Black Eyed Peas/Monkey Business/10 They Don't Want Music (Feat. James Brown).m4a",
"/Music/iTunes 1/iTunes Media/Music/Black Eyed Peas/The E.N.D/1-01 Boom Boom Pow.m4a",
"/Music/iTunes 1/iTunes Media/Music/Black Eyed Peas/Timeless/01 Mas Que Nada.m4a",
"/Music/iTunes 1/iTunes Media/Music/Blondie/Heart Of Glass/Heart Of Glass.m4a",
"/Music/iTunes 1/iTunes Media/Music/Bob Dylan/Nashville Skyline/06 Lay Lady Lay.m4a",
"/Music/iTunes 1/iTunes Media/Music/Botany/Feeling Today - EP/03 Waterparker.m4a",
"/Music/iTunes 1/iTunes Media/Music/Céu/CéU/06 10 Contados.m4a",
"/Music/iTunes 1/iTunes Media/Music/Chance/Six Through Ten/03 Forgive+Forget.m4a",
"/Music/iTunes 1/iTunes Media/Music/Clive Tanaka Y Su Orquesta/Jet Set Siempre 1°/03 Neu Chicago (Side A) [For Dance].m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Absolute Rock Classics/1-02 Smoke on the water.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Almost Famous Soundtrack/10 Simple Man.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Audio News - Rock'n' Roll Forever/01 Rock Around The Clock.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Austin Powers_ International Man Of Mystery/01 The Magic Piper (Of Love).m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Austin Powers_ The Spy Who Shagged Me/04 American Woman.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Back To Dance/03 Long Cool Woman In A Black Dress.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Back To The 70's - O Album Da Década/03 American Pie.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Bambolê/09 In The Mood.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Bambolê - Volume II/03 Blue Moon.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Big Brother Brasil 2004/04 I Will Survive.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Collateral Soundtrack/03 Hands Of Time.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Forrest Gump - The Soundtrack/1-12 California Dreamin'.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Forrest Gump - The Soundtrack/1-16 Mrs. Robinson.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Ghost World - Original Motion Picture Soundtrack/01 Jaan Pechechaan Ho.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Grease [Original Soundtrack]/01 Grease.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/La Bamba/09 Summertime Blues.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Pretty Woman/10 Oh Pretty Woman.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents African Groove/01 Saye Mogo Bana.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Arabic Groove/02 Galbi.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Asian Groove/03 Remember Tomorrow.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Blues Lounge/01 Midnight Dream.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Blues Lounge/03 Banal Reality.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Blues Lounge/04 Parchman Blues.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Blues Lounge/06 Run On.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Brazilian Groove/01 Maria Moita.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Brazilian Lounge/08 E Depois....m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Brazilian Lounge/11 Os Grilos.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Euro Lounge/01 Un Simple Histoire.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Euro Lounge/02 Limbe.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Euro Lounge/05 Sempre Di Domenica.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents Euro Lounge/12 Voulez-Vous_.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents World Lounge/03 Santa Maria.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents_ A New Groove/02 Dirty Laundry.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents_ Blues Around the World/02 Canceriano Sem Lar (Clinica Tobias Blues).m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents_ Euro Groove/03 Check In.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Putumayo Presents_ World Groove/01 Attention.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Saturday Night Fever/01 Stayin' Alive.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/Saturday Night Fever/03 Night Fever.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/The Best Air Guitar Album In The World... Ever!/2-06 Johnny B. Goode.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/The Full Monty - Soundtrack/02 You Sexy Thing.m4a",
"/Music/iTunes 1/iTunes Media/Music/Compilations/The Full Monty - Soundtrack/11 We Are Family.m4a",
"/Music/iTunes 1/iTunes Media/Music/Cut Copy/Zonoscope (Bonus Version)/10 Corner of the Sky.m4a",
"/Music/iTunes 1/iTunes Media/Music/David Bowie/Changesbowie/07 Diamond Dogs.m4a",
"/Music/iTunes 1/iTunes Media/Music/Douster & Savage Skulls/Get Rich or High Tryin' - EP/01 Bad Gal.m4a",
"/Music/iTunes 1/iTunes Media/Music/Elton John/Greatest Hits 1970-2002/1-04 Rocket Man (I Think It's Going to Be a Long, Long Time).m4a",
"/Music/iTunes 1/iTunes Media/Music/Elvis Presley/ELV1S 30 #1 Hits/02 Don't Be Cruel.m4a",
"/Music/iTunes 1/iTunes Media/Music/Eric Clapton/The Cream Of Clapton/03 I Feel Free.m4a",
"/Music/iTunes 1/iTunes Media/Music/Fleetwood Mac/The Very Best Of Fleetwood Mac/02 Don't Stop.m4a",
"/Music/iTunes 1/iTunes Media/Music/Françoise Hardy/Comment te dire adieu/Comment te dire adieu.m4a",
"/Music/iTunes 1/iTunes Media/Music/Games/That We Can Play - EP/01 Strawberry Skies.m4a",
"/Music/iTunes 1/iTunes Media/Music/Grand Funk Railroad/Collectors Series/The Loco-Motion.m4a",
"/Music/iTunes 1/iTunes Media/Music/Henry Mancini/The Pink Panther (Music from the Film Score)/The Pink Panther Theme.m4a",
"/Music/iTunes 1/iTunes Media/Music/Holy Ghost!/Do It Again - Single/01 Do It Again.m4a",
"/Music/iTunes 1/iTunes Media/Music/K.C. & The Sunshine Band/The Best of/03 I'm Your Boogie Man.m4a",
"/Music/iTunes 1/iTunes Media/Music/K.C. & The Sunshine Band/Unknown Album/Megamix (Thats The Way, Shake Your Booty, Get Down Tonight, Give It Up).m4a",
"/Music/iTunes 1/iTunes Media/Music/Kim Ann Foxman & Andy Butler/Creature - EP/01 Creature.m4a",
"/Music/iTunes 1/iTunes Media/Music/Nico/Chelsea Girl/01 The Fairest Of The Seasons.m4a",
"/Music/iTunes 1/iTunes Media/Music/oOoOO/oOoOO - EP/02 Burnout Eyess.m4a",
"/Music/iTunes 1/iTunes Media/Music/Peter Frampton/The Very Best of Peter Frampton/Baby, I Love Your Way.m4a",
"/Music/iTunes 1/iTunes Media/Music/Peter Frampton/The Very Best of Peter Frampton/Show Me The Way.m4a",
"/Music/iTunes 1/iTunes Media/Music/Raul Seixas/A Arte De Raul Seixas/03 Metamorfose Ambulante.m4a",
"/Music/iTunes 1/iTunes Media/Music/Raul Seixas/A Arte De Raul Seixas/18 Eu Nasci há 10 Mil Anos Atrás.m4a",
"/Music/iTunes 1/iTunes Media/Music/Rick James/Street Songs/Super Freak.m4a",
"/Music/iTunes 1/iTunes Media/Music/Rita Lee/Fruto Proibido/Agora Só Falta Você.m4a",
"/Music/iTunes 1/iTunes Media/Music/Rita Lee/Fruto Proibido/Esse Tal De Roque Enrow.m4a",
"/Music/iTunes 1/iTunes Media/Music/Roberto Carlos/Roberto Carlos 1966/05 Negro Gato.m4a",
"/Music/iTunes 1/iTunes Media/Music/SOHO/Goddess/02 Hippychick.m4a",
"/Music/iTunes 1/iTunes Media/Music/Stan Getz/Getz_Gilberto/05 Corcovado (Quiet Nights of Quiet Stars).m4a",
"/Music/iTunes 1/iTunes Media/Music/Steely Dan/Pretzel Logic/Rikki Don't Loose That Number.m4a",
"/Music/iTunes 1/iTunes Media/Music/Stevie Wonder/For Once In My Life/I Don't Know Why.m4a",
"/Music/iTunes 1/iTunes Media/Music/Teebs/Ardour/While You Doooo.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Beatles/Magical Mystery Tour/08 Strawberry Fields Forever.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Beatles/Past Masters, Vol. 1/10 Long Tall Sally.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Beatles/Please Please Me/14 Twist And Shout.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Beatles/Sgt. Pepper's Lonely Hearts Club Band/03 Lucy In The Sky With Diamonds.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Black Crowes/Amorica/09 Wiser Time.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Black Crowes/By Your Side/05 Only A Fool.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Black Crowes/Shake Your Money Maker/04 Could I''ve Been So Blind.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Black Crowes/The Southern Harmony And Musical Companion/01 Sting Me.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Black Crowes/Three Snakes And One Charm/02 Good Friday.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Doors/Strange Days (40th Anniversary Mixes)/01 Strange Days.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Rolling Stones/Forty Licks/1-03 (I Can't Get No) Satisfaction.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Velvet Underground/The Velvet Underground & Nico/02 I'm Waiting For The Man.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Velvet Underground/The Velvet Underground & Nico/03 Femme Fatale.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Velvet Underground/White Light_White Heat/04 Here She Comes Now.m4a",
"/Music/iTunes 1/iTunes Media/Music/The Who/Sings My Generation/My Generation.m4a",
"/Music/iTunes 1/iTunes Media/Music/Village People/The Very Best Of Village People/Macho Man.m4a",
"/Music/iTunes 1/iTunes Media/Music/Vondelpark/Sauna - EP/01 California Analog Dream.m4a",
"/Music/iTunes 1/iTunes Media/Music/War/Why Can't We Be Friends/Low Rider.m4a",
"/Music/iTunes 1/iTunes Media/Music/Yes/Fragile/01 Roundabout.m4a",
}

13
utils/time.go Normal file
View File

@@ -0,0 +1,13 @@
package utils
import "time"
func TimeNewest(times ...time.Time) time.Time {
newest := time.Time{}
for _, t := range times {
if t.After(newest) {
newest = t
}
}
return newest
}

28
utils/time_test.go Normal file
View File

@@ -0,0 +1,28 @@
package utils_test
import (
"time"
"github.com/navidrome/navidrome/utils"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Specs for TimeNewest, covering the empty, single and multi-argument cases.
var _ = Describe("TimeNewest", func() {
	It("returns zero time when no times are provided", func() {
		Expect(utils.TimeNewest()).To(Equal(time.Time{}))
	})
	It("returns the time when only one time is provided", func() {
		t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
		Expect(utils.TimeNewest(t1)).To(Equal(t1))
	})
	It("returns the newest time when multiple times are provided", func() {
		t1 := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
		t2 := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)
		t3 := time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC)
		Expect(utils.TimeNewest(t1, t2, t3)).To(Equal(t2))
	})
})

13
utils/utils_suite_test.go Normal file
View File

@@ -0,0 +1,13 @@
package utils
import (
"testing"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestUtils wires Ginkgo into the standard `go test` runner for this package.
func TestUtils(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Utils Suite")
}